var/home/core/zuul-output/logs/kubelet.log
Oct 09 09:04:35 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 09 09:04:35 crc restorecon[4509]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc 
restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 09 09:04:35 crc 
restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:35 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc 
restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc 
restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 
crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 
09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 
09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 09 09:04:36 crc 
restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 
09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 
09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc 
restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 09 09:04:36 crc restorecon[4509]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 09 09:04:36 crc kubenswrapper[4710]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.693050 4710 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695572 4710 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695588 4710 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695592 4710 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695597 4710 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695600 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695604 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695609 4710 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695613 4710 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695617 4710 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695621 4710 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695625 4710 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695629 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695633 4710 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695636 4710 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695640 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695643 4710 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695646 4710 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695651 4710 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695655 4710 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695659 4710 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695662 4710 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695666 4710 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695669 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695678 4710 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695682 4710 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695685 4710 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695688 4710 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695692 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695695 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695698 4710 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695701 4710 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695705 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695708 4710 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695711 4710 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695714 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695717 4710 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695721 4710 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695724 4710 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695727 4710 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695731 4710 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695734 4710 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695737 4710 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695746 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695750 4710 feature_gate.go:330] unrecognized 
feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695753 4710 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695756 4710 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695759 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695762 4710 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695765 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695768 4710 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695771 4710 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695774 4710 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695777 4710 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695781 4710 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695784 4710 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695787 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695790 4710 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695794 4710 feature_gate.go:330] unrecognized feature gate: Example Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695800 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695803 4710 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695806 4710 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695809 4710 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695813 4710 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695816 4710 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695819 4710 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695822 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695826 4710 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695830 4710 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695834 4710 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695837 4710 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.695843 4710 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696814 4710 flags.go:64] FLAG: --address="0.0.0.0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696827 4710 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696840 4710 flags.go:64] FLAG: --anonymous-auth="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696846 4710 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696851 4710 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696855 4710 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696860 4710 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696873 4710 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696877 4710 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696881 4710 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696885 4710 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696889 4710 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696893 4710 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696897 4710 flags.go:64] FLAG: --cgroup-root="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696901 4710 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696905 4710 flags.go:64] FLAG: --client-ca-file="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696908 4710 flags.go:64] FLAG: --cloud-config="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696912 4710 flags.go:64] FLAG: --cloud-provider="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696915 4710 flags.go:64] FLAG: --cluster-dns="[]" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696923 4710 flags.go:64] FLAG: --cluster-domain="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696927 4710 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696931 4710 flags.go:64] FLAG: --config-dir="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696934 4710 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696938 4710 flags.go:64] FLAG: --container-log-max-files="5" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696943 4710 flags.go:64] 
FLAG: --container-log-max-size="10Mi" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696947 4710 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696951 4710 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696955 4710 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696959 4710 flags.go:64] FLAG: --contention-profiling="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696963 4710 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696967 4710 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696972 4710 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696976 4710 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696981 4710 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696985 4710 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696989 4710 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696992 4710 flags.go:64] FLAG: --enable-load-reader="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.696996 4710 flags.go:64] FLAG: --enable-server="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697000 4710 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697010 4710 flags.go:64] FLAG: --event-burst="100" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697014 4710 flags.go:64] FLAG: --event-qps="50" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697018 4710 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697023 4710 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697032 4710 flags.go:64] FLAG: --eviction-hard="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697037 4710 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697041 4710 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697045 4710 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697049 4710 flags.go:64] FLAG: --eviction-soft="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697053 4710 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697057 4710 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697061 4710 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697064 4710 flags.go:64] FLAG: --experimental-mounter-path="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697068 4710 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697071 4710 flags.go:64] FLAG: --fail-swap-on="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 
09:04:36.697075 4710 flags.go:64] FLAG: --feature-gates="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697079 4710 flags.go:64] FLAG: --file-check-frequency="20s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697083 4710 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697087 4710 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697091 4710 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697095 4710 flags.go:64] FLAG: --healthz-port="10248" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697099 4710 flags.go:64] FLAG: --help="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697102 4710 flags.go:64] FLAG: --hostname-override="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697106 4710 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697110 4710 flags.go:64] FLAG: --http-check-frequency="20s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697114 4710 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697118 4710 flags.go:64] FLAG: --image-credential-provider-config="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697122 4710 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697126 4710 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697129 4710 flags.go:64] FLAG: --image-service-endpoint="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697133 4710 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697137 4710 flags.go:64] FLAG: --kube-api-burst="100" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697140 4710 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697144 4710 flags.go:64] FLAG: --kube-api-qps="50" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697148 4710 flags.go:64] FLAG: --kube-reserved="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697151 4710 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697155 4710 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697159 4710 flags.go:64] FLAG: --kubelet-cgroups="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697162 4710 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697166 4710 flags.go:64] FLAG: --lock-file="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697175 4710 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697179 4710 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697183 4710 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697189 4710 flags.go:64] FLAG: --log-json-split-stream="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697193 4710 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697196 4710 flags.go:64] FLAG: 
--log-text-split-stream="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697200 4710 flags.go:64] FLAG: --logging-format="text" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697204 4710 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697208 4710 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697221 4710 flags.go:64] FLAG: --manifest-url="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697225 4710 flags.go:64] FLAG: --manifest-url-header="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697230 4710 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697234 4710 flags.go:64] FLAG: --max-open-files="1000000" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697239 4710 flags.go:64] FLAG: --max-pods="110" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697243 4710 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697247 4710 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697251 4710 flags.go:64] FLAG: --memory-manager-policy="None" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697256 4710 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697260 4710 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697264 4710 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697268 4710 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697276 4710 flags.go:64] FLAG: --node-status-max-images="50" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697280 4710 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697284 4710 flags.go:64] FLAG: --oom-score-adj="-999" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697288 4710 flags.go:64] FLAG: --pod-cidr="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697291 4710 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697302 4710 flags.go:64] FLAG: --pod-manifest-path="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697306 4710 flags.go:64] FLAG: --pod-max-pids="-1" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697310 4710 flags.go:64] FLAG: --pods-per-core="0" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697314 4710 flags.go:64] FLAG: --port="10250" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697318 4710 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697322 4710 flags.go:64] FLAG: --provider-id="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697326 4710 flags.go:64] FLAG: --qos-reserved="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697330 4710 flags.go:64] FLAG: --read-only-port="10255" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697334 4710 flags.go:64] FLAG: 
--register-node="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697338 4710 flags.go:64] FLAG: --register-schedulable="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697348 4710 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697355 4710 flags.go:64] FLAG: --registry-burst="10" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697359 4710 flags.go:64] FLAG: --registry-qps="5" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697363 4710 flags.go:64] FLAG: --reserved-cpus="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697367 4710 flags.go:64] FLAG: --reserved-memory="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697372 4710 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697375 4710 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697379 4710 flags.go:64] FLAG: --rotate-certificates="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697383 4710 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697387 4710 flags.go:64] FLAG: --runonce="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697390 4710 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697394 4710 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697398 4710 flags.go:64] FLAG: --seccomp-default="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697402 4710 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697406 4710 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697410 4710 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697414 4710 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697418 4710 flags.go:64] FLAG: --storage-driver-password="root" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697422 4710 flags.go:64] FLAG: --storage-driver-secure="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697425 4710 flags.go:64] FLAG: --storage-driver-table="stats" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697443 4710 flags.go:64] FLAG: --storage-driver-user="root" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697446 4710 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697450 4710 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697454 4710 flags.go:64] FLAG: --system-cgroups="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697457 4710 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697463 4710 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697467 4710 flags.go:64] FLAG: --tls-cert-file="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697470 4710 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697478 4710 flags.go:64] 
FLAG: --tls-min-version="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697482 4710 flags.go:64] FLAG: --tls-private-key-file="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697486 4710 flags.go:64] FLAG: --topology-manager-policy="none" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697490 4710 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697494 4710 flags.go:64] FLAG: --topology-manager-scope="container" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697498 4710 flags.go:64] FLAG: --v="2" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697503 4710 flags.go:64] FLAG: --version="false" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697508 4710 flags.go:64] FLAG: --vmodule="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697520 4710 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697524 4710 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697630 4710 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697635 4710 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697638 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697641 4710 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697645 4710 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697648 4710 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697651 4710 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697655 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697658 4710 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697662 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697665 4710 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697669 4710 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697672 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697675 4710 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697679 4710 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
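[Annotation] The long run of "flags.go:64] FLAG: --name=\"value\"" entries above is the kubelet echoing every command-line flag with its effective value at startup. A small sketch, under the same format assumption, that turns that dump into a name-to-value map so individual settings (for example --node-ip or --system-reserved) can be looked up; the specific flags printed at the end are just illustrations.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Parses entries of the form `flags.go:64] FLAG: --node-ip="192.168.126.11"`
// into a map of flag name to quoted value. FindAllStringSubmatch copes with
// several FLAG entries appearing on one captured line, as in this log.
func main() {
	re := regexp.MustCompile(`FLAG: (--[a-z0-9-]+)="(.*?)"`)
	flags := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		for _, m := range re.FindAllStringSubmatch(sc.Text(), -1) {
			flags[m[1]] = m[2]
		}
	}
	fmt.Println(flags["--node-ip"], flags["--system-reserved"])
}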
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697682 4710 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697686 4710 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697689 4710 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697692 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697695 4710 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697699 4710 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697702 4710 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697705 4710 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697708 4710 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697712 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697716 4710 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697720 4710 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697724 4710 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697727 4710 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697730 4710 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697733 4710 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697737 4710 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697740 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697743 4710 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697751 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697755 4710 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697758 4710 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697761 4710 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697767 4710 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697770 4710 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697773 4710 
feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697776 4710 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697780 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697783 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697786 4710 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697789 4710 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697792 4710 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697796 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697799 4710 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697802 4710 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697805 4710 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697808 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697811 4710 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697815 4710 feature_gate.go:330] unrecognized feature gate: Example Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697818 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697821 4710 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697824 4710 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697827 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697830 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697833 4710 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697836 4710 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697840 4710 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697843 4710 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697847 4710 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697851 4710 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697855 4710 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697858 4710 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697862 4710 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697867 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697877 4710 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.697887 4710 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.697898 4710 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.703653 4710 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.703675 4710 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704077 4710 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704092 4710 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704096 4710 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704101 4710 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704105 4710 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704109 4710 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704113 4710 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704116 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704120 4710 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704123 4710 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704126 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704129 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704132 
4710 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704136 4710 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704140 4710 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704143 4710 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704147 4710 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704150 4710 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704153 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704157 4710 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704161 4710 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704164 4710 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704167 4710 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704171 4710 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704174 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704177 4710 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704181 4710 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704184 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704187 4710 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704191 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704194 4710 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704198 4710 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704201 4710 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704204 4710 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704209 4710 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704224 4710 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704228 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704232 4710 feature_gate.go:330] unrecognized 
feature gate: ManagedBootImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704237 4710 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704241 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704245 4710 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704248 4710 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704251 4710 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704255 4710 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704258 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704262 4710 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704267 4710 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704271 4710 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704276 4710 feature_gate.go:330] unrecognized feature gate: Example Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704280 4710 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704283 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704286 4710 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704289 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704292 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704295 4710 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704299 4710 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704302 4710 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704305 4710 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704309 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704314 4710 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
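[Annotation] The repeated "unrecognized feature gate" warnings above come from gate names (apparently OpenShift cluster-level gates such as PlatformOperators, GatewayAPI, NewOLM) that the kubelet's own feature-gate registry does not know; the same block recurs because the gate set is parsed more than once during startup, as the timestamps .695x, .697x and .704x show. A sketch that deduplicates and counts those warnings across the whole journal; again, the regular expression is inferred from the message text.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

// Counts how often each "unrecognized feature gate: <Name>" warning appears.
func main() {
	re := regexp.MustCompile(`unrecognized feature gate: ([A-Za-z0-9]+)`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		for _, m := range re.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]]++
		}
	}
	names := make([]string, 0, len(counts))
	for n := range counts {
		names = append(names, n)
	}
	sort.Strings(names)
	for _, n := range names {
		fmt.Printf("%-55s %d\n", n, counts[n])
	}
}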
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704318 4710 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704322 4710 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704326 4710 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704329 4710 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704332 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704335 4710 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704338 4710 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704342 4710 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704345 4710 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704348 4710 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704352 4710 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.704357 4710 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704483 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704490 4710 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704493 4710 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704497 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704501 4710 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704504 4710 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704508 4710 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704511 4710 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704515 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704519 4710 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704522 4710 feature_gate.go:330] 
unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704526 4710 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704529 4710 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704532 4710 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704535 4710 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704538 4710 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704542 4710 feature_gate.go:330] unrecognized feature gate: Example Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704545 4710 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704548 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704552 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704556 4710 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704560 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704563 4710 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704567 4710 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704571 4710 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704574 4710 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704577 4710 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704580 4710 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704584 4710 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704588 4710 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704592 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704595 4710 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704599 4710 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704602 4710 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704606 4710 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704609 4710 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704613 4710 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704616 4710 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704619 4710 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704623 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704626 4710 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704630 4710 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704633 4710 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704636 4710 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704640 4710 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704643 4710 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704646 4710 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704650 4710 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704653 4710 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704656 4710 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704660 4710 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704663 4710 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704666 4710 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704669 4710 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704673 4710 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704676 4710 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704680 4710 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704683 4710 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704686 4710 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704689 4710 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704692 4710 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704695 4710 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704698 4710 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704702 4710 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704706 4710 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704709 4710 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704712 4710 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704715 4710 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704720 4710 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
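[Annotation] Alongside the warnings, the "feature_gate.go:386] feature gates: {map[...]}" entries above (and again just below) record the effective gate map that was actually applied, rendered by Go's fmt package. A sketch that parses such a line back into a map[string]bool; the sample line is shortened from the log and the Name:bool pattern is an assumption about that rendering.

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Parses "feature gates: {map[Name:true Name:false ...]}" into a map.
func parseGates(line string) map[string]bool {
	re := regexp.MustCompile(`([A-Za-z0-9]+):(true|false)`)
	gates := map[string]bool{}
	for _, m := range re.FindAllStringSubmatch(line, -1) {
		v, _ := strconv.ParseBool(m[2])
		gates[m[1]] = v
	}
	return gates
}

func main() {
	line := `feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false ValidatingAdmissionPolicy:true]}`
	g := parseGates(line)
	fmt.Println(g["NodeSwap"], g["ValidatingAdmissionPolicy"]) // false true
}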
Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704724 4710 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.704728 4710 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.704734 4710 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.705225 4710 server.go:940] "Client rotation is on, will bootstrap in background" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.707763 4710 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.707826 4710 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.708800 4710 server.go:997] "Starting client certificate rotation" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.708834 4710 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.708969 4710 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-11 00:31:23.151461563 +0000 UTC Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.709033 4710 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2247h26m46.442430834s for next certificate rotation Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.721246 4710 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.722537 4710 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.733025 4710 log.go:25] "Validated CRI v1 runtime API" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.750731 4710 log.go:25] "Validated CRI v1 image API" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.752501 4710 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.755827 4710 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-09-09-00-17-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.755847 4710 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 
fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.764875 4710 manager.go:217] Machine: {Timestamp:2025-10-09 09:04:36.763769093 +0000 UTC m=+0.253877489 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2445406 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:7ac7f3a6-4575-4e30-8696-d94e384307e4 BootID:22e9dfd4-7a0f-4a29-94b5-6d66f0b46200 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:3076108 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:55:b5:ae Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:55:b5:ae Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:8b:db:36 Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:23:e2:f0 Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:6d:16:06 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:44:35:b0 Speed:-1 Mtu:1436} {Name:enp7s0.23 MacAddress:52:54:00:68:26:23 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:82:ca:fe:d4:8d:a9 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:32:d4:f4:34:c6:4e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.765030 4710 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.765105 4710 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.765878 4710 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766026 4710 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766055 4710 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766208 4710 topology_manager.go:138] "Creating topology manager with none policy" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766229 4710 container_manager_linux.go:303] "Creating device plugin manager" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766515 4710 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766544 4710 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766831 4710 state_mem.go:36] "Initialized new in-memory state store" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.766904 4710 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.768476 4710 kubelet.go:418] "Attempting to sync node with API server" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.768496 4710 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.768517 4710 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.768528 4710 kubelet.go:324] "Adding apiserver pod source" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.768537 4710 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.770378 4710 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.771109 4710 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.772484 4710 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.773351 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.773366 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773401 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.773405 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773418 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.773418 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773450 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773457 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773468 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773474 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773480 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773489 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773497 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773504 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773513 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773519 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.773904 4710 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 
09:04:36.774264 4710 server.go:1280] "Started kubelet" Oct 09 09:04:36 crc systemd[1]: Started Kubernetes Kubelet. Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.775780 4710 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.775871 4710 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.776077 4710 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.776147 4710 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.780587 4710 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.780670 4710 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.780640 4710 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.166:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186cc74a679362b1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-09 09:04:36.774240945 +0000 UTC m=+0.264349342,LastTimestamp:2025-10-09 09:04:36.774240945 +0000 UTC m=+0.264349342,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.781351 4710 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 06:11:27.502590718 +0000 UTC Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.781377 4710 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1509h6m50.721216977s for next certificate rotation Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.782325 4710 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.782344 4710 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.782405 4710 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.782546 4710 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.782618 4710 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.166:6443: connect: connection refused" interval="200ms" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.782938 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list 
*v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.783593 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.783537 4710 factory.go:55] Registering systemd factory Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.783916 4710 factory.go:221] Registration of the systemd container factory successfully Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.783836 4710 server.go:460] "Adding debug handlers to kubelet server" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.785508 4710 factory.go:153] Registering CRI-O factory Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.785523 4710 factory.go:221] Registration of the crio container factory successfully Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.785570 4710 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.785585 4710 factory.go:103] Registering Raw factory Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.785598 4710 manager.go:1196] Started watching for new ooms in manager Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.786027 4710 manager.go:319] Starting recovery of all containers Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788235 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788498 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788511 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788520 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788530 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788538 4710 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788547 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788556 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788565 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788576 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788584 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788594 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788601 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788611 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788623 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788632 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788640 4710 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788649 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788657 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788665 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788673 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788681 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788689 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788698 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788707 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788716 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788728 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788739 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788748 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788773 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788781 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788789 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788798 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788808 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788817 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788825 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788833 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788842 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788850 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788860 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788867 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788875 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788889 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788898 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788908 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788916 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788925 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788933 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788941 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788950 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788958 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788966 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788977 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788986 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.788996 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789005 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789014 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789023 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789032 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789040 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789049 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789057 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789065 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789074 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789082 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789090 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789098 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789106 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789134 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789146 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789154 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789162 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" 
volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789169 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789176 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789184 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789191 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789203 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789211 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789232 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789239 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789249 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789257 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789265 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789272 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789280 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.789288 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790528 4710 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790557 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790573 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790582 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790592 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790600 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790632 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 
09:04:36.790650 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790668 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790676 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790684 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790693 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790701 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790710 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790719 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790726 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790735 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790744 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790751 4710 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790763 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790772 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790781 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790791 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790801 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790810 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790818 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790827 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790837 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790846 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790854 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790862 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790869 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790878 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790885 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790894 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790902 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790921 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790928 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790937 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790944 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790953 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790971 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790981 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790989 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.790996 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791005 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791013 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791020 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791029 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791037 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791044 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791051 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791060 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791068 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791076 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791085 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791093 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791102 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791111 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791119 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791127 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791136 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791144 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791152 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791161 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791170 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791178 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791188 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791195 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791203 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791211 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791227 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791235 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791243 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791250 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791258 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791265 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791274 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791281 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791309 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791316 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791325 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791332 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791341 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791348 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" 
volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791374 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791383 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791391 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791410 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791419 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791441 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791453 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791462 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791470 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791477 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791486 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791494 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791502 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791509 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791517 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791525 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791533 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791541 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791548 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791556 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791563 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791573 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791580 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791588 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791597 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791604 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791611 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791619 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791627 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791634 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791643 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791650 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791657 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791665 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791673 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791681 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791688 4710 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791696 4710 reconstruct.go:97] "Volume reconstruction finished" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.791706 4710 reconciler.go:26] "Reconciler: start to sync state" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.796565 4710 manager.go:324] Recovery completed Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.802562 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.807454 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.807495 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.807507 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.809572 4710 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.809605 4710 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.809637 4710 state_mem.go:36] "Initialized new in-memory state store" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.812048 4710 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.813675 4710 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.813722 4710 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.813742 4710 kubelet.go:2335] "Starting kubelet main sync loop" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.813805 4710 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.814084 4710 policy_none.go:49] "None policy: Start" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.815107 4710 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.815137 4710 state_mem.go:35] "Initializing new in-memory state store" Oct 09 09:04:36 crc kubenswrapper[4710]: W1009 09:04:36.815146 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.815196 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.858342 4710 manager.go:334] "Starting Device Plugin manager" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.858390 4710 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.858407 4710 server.go:79] "Starting device plugin registration server" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.860352 4710 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.860372 4710 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.860814 4710 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.860912 4710 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.860943 4710 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.866568 4710 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.914255 4710 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.914364 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: 
I1009 09:04:36.915083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.915117 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.915127 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.915453 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.915664 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.915723 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916084 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916094 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916214 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916319 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916345 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916323 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.916463 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917002 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917035 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917045 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917522 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917548 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917625 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917748 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.917780 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918099 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918143 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918152 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918255 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918296 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918310 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918508 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918533 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918873 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918902 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918912 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.918990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919011 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919050 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919073 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.919610 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.960740 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.961833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.961869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.961881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.961901 4710 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.962200 4710 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.166:6443: connect: connection refused" node="crc" Oct 09 09:04:36 crc kubenswrapper[4710]: E1009 09:04:36.983193 4710 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.166:6443: connect: connection refused" interval="400ms" 
Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993368 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993508 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993595 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993704 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993797 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993871 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.993945 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994012 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994104 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994210 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994293 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994359 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994418 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994510 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:36 crc kubenswrapper[4710]: I1009 09:04:36.994594 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096364 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096407 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096443 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096461 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096479 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096499 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096515 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096516 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096570 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096578 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096594 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096596 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096610 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 
09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096532 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096622 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096633 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096478 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096646 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096663 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096500 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096676 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096685 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096695 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 
09:04:37.096701 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096712 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096718 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096729 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096749 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096775 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.096802 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.165305 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.167806 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.167842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.167854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.167877 4710 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 09:04:37 crc kubenswrapper[4710]: E1009 09:04:37.168107 4710 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.166:6443: connect: connection refused" node="crc" Oct 09 09:04:37 crc 
kubenswrapper[4710]: I1009 09:04:37.249604 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.268066 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.272419 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-f3e223f25fb10aba3669fa6e4e80c9b92ab2444fc16ffb39a108921144957d82 WatchSource:0}: Error finding container f3e223f25fb10aba3669fa6e4e80c9b92ab2444fc16ffb39a108921144957d82: Status 404 returned error can't find the container with id f3e223f25fb10aba3669fa6e4e80c9b92ab2444fc16ffb39a108921144957d82 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.281832 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.282627 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-8a0eaa38b995433ef71b097e4cf3847e40de09c297195acea80d73d50791d7bd WatchSource:0}: Error finding container 8a0eaa38b995433ef71b097e4cf3847e40de09c297195acea80d73d50791d7bd: Status 404 returned error can't find the container with id 8a0eaa38b995433ef71b097e4cf3847e40de09c297195acea80d73d50791d7bd Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.285883 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.294712 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-bf63f773cdfbb27523d7ad22132f0e19636aeb9c41b64e68eef313e265dba0f3 WatchSource:0}: Error finding container bf63f773cdfbb27523d7ad22132f0e19636aeb9c41b64e68eef313e265dba0f3: Status 404 returned error can't find the container with id bf63f773cdfbb27523d7ad22132f0e19636aeb9c41b64e68eef313e265dba0f3 Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.295946 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-d989d91a791afcdf05e0e6c1a369b6d4d5174171096a9e5def4963b0c152c4b2 WatchSource:0}: Error finding container d989d91a791afcdf05e0e6c1a369b6d4d5174171096a9e5def4963b0c152c4b2: Status 404 returned error can't find the container with id d989d91a791afcdf05e0e6c1a369b6d4d5174171096a9e5def4963b0c152c4b2 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.303191 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.318986 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-737e200f2cae110e373492acdcbaa6e640e08b3e99bd307e4899b98bfecb5396 WatchSource:0}: Error finding container 737e200f2cae110e373492acdcbaa6e640e08b3e99bd307e4899b98bfecb5396: Status 404 returned error can't find the container with id 737e200f2cae110e373492acdcbaa6e640e08b3e99bd307e4899b98bfecb5396 Oct 09 09:04:37 crc kubenswrapper[4710]: E1009 09:04:37.384957 4710 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.166:6443: connect: connection refused" interval="800ms" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.569136 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.571670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.571717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.571727 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.571756 4710 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 09:04:37 crc kubenswrapper[4710]: E1009 09:04:37.572182 4710 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.166:6443: connect: connection refused" node="crc" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.781628 4710 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.806403 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:37 crc kubenswrapper[4710]: E1009 09:04:37.806541 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:37 crc kubenswrapper[4710]: W1009 09:04:37.815226 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:37 crc kubenswrapper[4710]: E1009 09:04:37.815316 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: 
failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.819237 4710 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4" exitCode=0 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.819332 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.819446 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bf63f773cdfbb27523d7ad22132f0e19636aeb9c41b64e68eef313e265dba0f3"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.819527 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.820591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.820635 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.820649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.823261 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.823295 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8a0eaa38b995433ef71b097e4cf3847e40de09c297195acea80d73d50791d7bd"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.825220 4710 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435" exitCode=0 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.825292 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.825314 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f3e223f25fb10aba3669fa6e4e80c9b92ab2444fc16ffb39a108921144957d82"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.825397 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc 
kubenswrapper[4710]: I1009 09:04:37.826418 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.826459 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.826471 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.827586 4710 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5" exitCode=0 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.827641 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.827687 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"737e200f2cae110e373492acdcbaa6e640e08b3e99bd307e4899b98bfecb5396"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.827766 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.828353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.828380 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.828389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.829800 4710 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0" exitCode=0 Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.829831 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.829849 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d989d91a791afcdf05e0e6c1a369b6d4d5174171096a9e5def4963b0c152c4b2"} Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.829903 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.830603 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.830628 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.830638 4710 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.833670 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.835266 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.835304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:37 crc kubenswrapper[4710]: I1009 09:04:37.835315 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: E1009 09:04:38.185653 4710 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.166:6443: connect: connection refused" interval="1.6s" Oct 09 09:04:38 crc kubenswrapper[4710]: W1009 09:04:38.202097 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:38 crc kubenswrapper[4710]: E1009 09:04:38.202156 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:38 crc kubenswrapper[4710]: W1009 09:04:38.309407 4710 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.166:6443: connect: connection refused Oct 09 09:04:38 crc kubenswrapper[4710]: E1009 09:04:38.309511 4710 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.166:6443: connect: connection refused" logger="UnhandledError" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.373078 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.375296 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.375361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.375374 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.375413 4710 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 09:04:38 crc kubenswrapper[4710]: E1009 09:04:38.376223 4710 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
192.168.26.166:6443: connect: connection refused" node="crc" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.833512 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.833549 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.833559 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.833631 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.834282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.834305 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.834314 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.836372 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.836395 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.836404 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.836470 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.836993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.837012 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.837020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839554 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839575 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839585 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839593 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839600 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.839654 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.840133 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.840156 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.840165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.841607 4710 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21" exitCode=0 Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.841658 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.841729 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.842276 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.842294 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.842302 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.843656 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825"} Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.843706 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.844118 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.844136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:38 crc kubenswrapper[4710]: I1009 09:04:38.844144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.716275 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.848982 4710 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec" exitCode=0 Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.849114 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.849162 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.849536 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec"} Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.849684 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.849723 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.850567 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.850631 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.850648 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.852982 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.853010 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.853020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.853116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.853128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.853141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.977146 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.978170 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.978205 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.978215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:39 crc kubenswrapper[4710]: I1009 09:04:39.978238 4710 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.108036 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.430451 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854375 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"38153b764b0e0a4be98293a91d411c7a4e036de3963c35d426e02352568fecd8"} Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854441 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f35c2c8069414d1bbbf0bfe4e7020dbfa78d024ec43161db9a06fccdc23b6e55"} Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854455 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"029c6476ce646f47c6104164ce2bffe8ac68b2b65c594a25308345f8befdcfef"} Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854466 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"33e9742ccbade317b6ab495d6fec629058fba4bc6cbc22b8fb403eca0d9b1f12"} Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854474 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"65b3efceddeaee218d95f885a5ae7a54983cc0746bcb1674484fd9089d21c98a"} Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854524 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.854600 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855618 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855644 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 
09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855656 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.855671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.986424 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.986711 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.987327 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.987350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:40 crc kubenswrapper[4710]: I1009 09:04:40.987359 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.380564 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.768241 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.856213 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.856258 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.856213 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.856996 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857017 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857027 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857147 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857178 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857168 4710 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857205 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:41 crc kubenswrapper[4710]: I1009 09:04:41.857213 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:43 crc kubenswrapper[4710]: I1009 09:04:43.586612 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 09 09:04:43 crc kubenswrapper[4710]: I1009 09:04:43.586750 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:43 crc kubenswrapper[4710]: I1009 09:04:43.587607 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:43 crc kubenswrapper[4710]: I1009 09:04:43.587628 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:43 crc kubenswrapper[4710]: I1009 09:04:43.587636 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.725546 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.725650 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.726503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.726534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.726544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.729394 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.866375 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:46 crc kubenswrapper[4710]: E1009 09:04:46.866620 4710 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.866964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.866991 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:46 crc kubenswrapper[4710]: I1009 09:04:46.867000 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.014220 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.575682 4710 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.575856 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.576739 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.576766 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.576774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.868385 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.869094 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.869124 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.869134 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:47 crc kubenswrapper[4710]: I1009 09:04:47.872011 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:48 crc kubenswrapper[4710]: I1009 09:04:48.780900 4710 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 09 09:04:48 crc kubenswrapper[4710]: I1009 09:04:48.869540 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:48 crc kubenswrapper[4710]: I1009 09:04:48.870560 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:48 crc kubenswrapper[4710]: I1009 09:04:48.870584 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:48 crc kubenswrapper[4710]: I1009 09:04:48.870593 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:49 crc kubenswrapper[4710]: I1009 09:04:49.030807 4710 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 09 09:04:49 crc kubenswrapper[4710]: I1009 09:04:49.030852 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 09:04:49 crc kubenswrapper[4710]: I1009 09:04:49.039887 4710 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver 
namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 09 09:04:49 crc kubenswrapper[4710]: I1009 09:04:49.039935 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.015189 4710 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.015339 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.435074 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.435187 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.436083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.436113 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.436122 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.438174 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.873255 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.873296 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.873947 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.873976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:50 crc kubenswrapper[4710]: I1009 09:04:50.873985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.605221 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.605361 4710 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.606166 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.606194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.606204 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.614029 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.880863 4710 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.881485 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.881511 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:53 crc kubenswrapper[4710]: I1009 09:04:53.881519 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.024567 4710 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.028998 4710 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.029341 4710 trace.go:236] Trace[1943753641]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 09:04:40.334) (total time: 13694ms): Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[1943753641]: ---"Objects listed" error: 13694ms (09:04:54.029) Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[1943753641]: [13.694835939s] [13.694835939s] END Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.029365 4710 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.029937 4710 trace.go:236] Trace[687543594]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 09:04:41.286) (total time: 12743ms): Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[687543594]: ---"Objects listed" error: 12742ms (09:04:54.029) Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[687543594]: [12.74319211s] [12.74319211s] END Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.029960 4710 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.030123 4710 trace.go:236] Trace[1808512379]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 09:04:40.535) (total time: 13494ms): Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[1808512379]: ---"Objects listed" error: 13494ms (09:04:54.029) Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[1808512379]: [13.494792009s] [13.494792009s] END Oct 09 09:04:54 crc kubenswrapper[4710]: 
I1009 09:04:54.030241 4710 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.030688 4710 trace.go:236] Trace[410868612]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (09-Oct-2025 09:04:40.280) (total time: 13750ms): Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[410868612]: ---"Objects listed" error: 13750ms (09:04:54.030) Oct 09 09:04:54 crc kubenswrapper[4710]: Trace[410868612]: [13.750289652s] [13.750289652s] END Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.030706 4710 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.037168 4710 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.037361 4710 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.038304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.038336 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.038346 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.038361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.038370 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.046842 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.049163 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.049188 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.049195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.049210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.049219 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.058106 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.060913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.060941 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.060950 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.060963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.060971 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.069728 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.071981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.072008 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.072016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.072030 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.072039 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.078337 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.080855 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.080881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.080890 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.080902 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.080912 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.087009 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.087102 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.087971 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.087990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.087998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.088009 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.088017 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.189588 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.189619 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.189629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.189642 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.189651 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.291893 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.291928 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.291937 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.291951 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.291960 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.314675 4710 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39458->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.314712 4710 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39466->192.168.126.11:17697: read: connection reset by peer" start-of-body= Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.314726 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39458->192.168.126.11:17697: read: connection reset by peer" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.314752 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:39466->192.168.126.11:17697: read: connection reset by peer" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.314973 4710 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.315029 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.393701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.393857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.393933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.394001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.394065 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.495866 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.495901 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.495909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.495931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.495940 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.597384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.597596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.597671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.597744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.597801 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.699829 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.699866 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.699874 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.699890 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.699899 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.779241 4710 apiserver.go:52] "Watching apiserver" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.781483 4710 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.781683 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.781952 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.782009 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.782029 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.782067 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.781963 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.782364 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.782366 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.782470 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.782667 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.782877 4710 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784148 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784211 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784272 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784308 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784464 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.784958 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.785715 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.786362 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.786627 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.801399 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.801444 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.801453 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 
09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.801464 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.801472 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.806831 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.814425 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.820781 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.826911 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834303 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834358 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834638 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834593 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834762 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834839 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834911 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835006 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835331 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835646 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835947 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836050 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834921 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836123 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.834988 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836243 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836250 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836266 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835107 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835289 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835416 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835606 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.835906 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.836312 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:04:55.336298259 +0000 UTC m=+18.826406656 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836358 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836514 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836286 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836542 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836574 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836595 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836755 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836770 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836798 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836822 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836836 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836851 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836871 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836884 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836899 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836913 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.836927 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837053 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837138 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837332 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837357 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837411 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837570 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837630 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837666 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837671 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837687 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837705 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837724 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837737 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837752 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837763 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837766 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837810 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837827 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837842 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837856 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837871 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837890 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837940 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837955 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837972 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837985 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838001 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838027 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838043 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838058 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838072 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838086 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838099 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838112 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838125 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838140 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838154 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838188 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838218 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838244 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838259 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838272 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838287 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838305 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838320 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838336 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838351 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838364 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838378 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838394 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838407 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838420 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838449 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838464 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838494 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838509 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838523 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838537 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838551 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838569 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838587 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838613 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838628 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838644 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838659 4710 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838673 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838687 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838701 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838719 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838735 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838749 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838763 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838777 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838794 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838810 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838824 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838839 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838853 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838867 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838880 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838893 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838907 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838921 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838935 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 09:04:54 
crc kubenswrapper[4710]: I1009 09:04:54.838951 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838964 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838978 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838991 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839004 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839019 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839032 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839046 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839067 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839082 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839095 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839109 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839123 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839136 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839153 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839169 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839185 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839200 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839217 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839246 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839261 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839277 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839292 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839309 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839328 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839345 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839360 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839376 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839390 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839404 4710 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839418 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839447 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839462 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839478 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839492 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839506 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839531 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839545 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839560 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 09:04:54 crc kubenswrapper[4710]: 
I1009 09:04:54.839575 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839591 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839606 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839621 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839636 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839650 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839664 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839679 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839693 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839707 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 09:04:54 crc 
kubenswrapper[4710]: I1009 09:04:54.839723 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839737 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839753 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839767 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839781 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839798 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839812 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839829 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839847 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839862 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 
09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839877 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839898 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839913 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839928 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839942 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839957 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839974 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839990 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840005 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840020 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod 
\"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840038 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840052 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840066 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840082 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840097 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840112 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840127 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840143 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840158 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840172 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod 
\"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840186 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840201 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840217 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840249 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840265 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840280 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840295 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840310 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840327 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840345 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: 
\"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840360 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840375 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840391 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840405 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840419 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840464 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840489 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840506 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840525 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " 
pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840559 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840576 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840591 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840609 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840626 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840644 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840660 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840677 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840693 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840738 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840749 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840758 4710 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840767 4710 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840799 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840808 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840816 4710 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840824 4710 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840834 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840842 4710 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840850 4710 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840859 4710 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840868 4710 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840876 4710 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840885 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840895 4710 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840904 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840913 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840922 4710 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840931 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840954 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840964 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838056 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.837929 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838131 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838192 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838336 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838496 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838694 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.838849 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839006 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839154 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839241 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839582 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.839780 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840216 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840278 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840328 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840461 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840531 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840625 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840722 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840758 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840905 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.840939 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841159 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841464 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841510 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841776 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841879 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.841937 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842020 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842153 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842459 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842480 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842522 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842760 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842938 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.842969 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843111 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843187 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843213 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843273 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843294 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843388 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843508 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843584 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843638 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843637 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843775 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843895 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.843931 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844167 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844194 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844264 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844337 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844356 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844402 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844474 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844493 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.844583 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.845123 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.845913 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846063 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846161 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846498 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846562 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846639 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846969 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.846984 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847141 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847158 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847268 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847364 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847409 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847587 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847598 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847611 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847700 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847848 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847796 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847892 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.847920 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848067 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848370 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848422 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848545 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848743 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848908 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848951 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.848988 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849049 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849243 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849369 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849486 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849401 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849566 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849622 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849775 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.849894 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.850121 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.850148 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.850337 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.850582 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.850593 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.850636 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.850647 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:55.350635993 +0000 UTC m=+18.840744391 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.851195 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.851203 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.851324 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.851617 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.852563 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.852569 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.852728 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.852798 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:55.352781939 +0000 UTC m=+18.842890336 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.853108 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.853177 4710 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.853188 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.853893 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854005 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.853382 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854339 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854373 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854560 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854618 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.854891 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855017 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855530 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855564 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855649 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855762 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855814 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.855872 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856061 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856103 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856253 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856493 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856608 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.856847 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.857514 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.857776 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858005 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858075 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858219 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858264 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858307 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.858607 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.860489 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861059 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861194 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861278 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861653 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861672 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861709 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861728 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.861938 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.862549 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.862651 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.862712 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.862834 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.862953 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.863294 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.863324 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.863358 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.863380 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.864589 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.864592 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.864645 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.864659 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.864666 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865012 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865035 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865154 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865380 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865705 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865768 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865803 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.865867 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.865922 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.865937 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.865946 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.865983 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:55.365973452 +0000 UTC m=+18.856081839 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.866331 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.867801 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.869070 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.869592 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.871068 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.871153 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.871213 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:54 crc kubenswrapper[4710]: E1009 09:04:54.871308 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:55.371297397 +0000 UTC m=+18.861405794 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.872521 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.873058 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.873508 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.875904 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.879655 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.883942 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.885165 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.886288 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.887904 4710 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2" exitCode=255 Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.887938 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.891970 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.896591 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.896868 4710 scope.go:117] "RemoveContainer" containerID="06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.897819 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.903683 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.903705 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.903713 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.903726 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.903734 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:54Z","lastTransitionTime":"2025-10-09T09:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.904710 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.911044 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.918049 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.924812 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.930212 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941592 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941662 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941730 4710 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941731 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941746 4710 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941788 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941798 4710 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941808 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on 
node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941817 4710 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941825 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941833 4710 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941840 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941848 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941858 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941867 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941876 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941884 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941891 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941900 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941908 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941915 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941922 4710 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941930 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941938 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941947 4710 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941956 4710 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941963 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941971 4710 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941978 4710 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941986 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.941993 4710 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942000 4710 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942008 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942016 4710 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942024 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942034 4710 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942041 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942048 4710 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942062 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942070 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942079 4710 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942087 4710 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942094 4710 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942109 4710 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942116 4710 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942124 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942131 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942139 4710 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942146 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942154 4710 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942162 4710 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942171 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942170 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942178 4710 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942207 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942217 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942226 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942249 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942258 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942272 4710 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942285 4710 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942294 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942302 4710 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942327 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942335 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942344 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942352 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942359 4710 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942366 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942383 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942404 4710 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942421 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942455 4710 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942464 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942471 4710 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942479 4710 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942488 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942495 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942502 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942509 4710 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942516 4710 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942524 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942531 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942540 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942547 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942555 4710 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942564 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942571 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942578 4710 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942585 4710 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942594 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942607 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942616 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942623 4710 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942630 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942637 4710 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942645 4710 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942652 4710 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942660 4710 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942667 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942676 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942684 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942692 4710 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942699 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942707 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942714 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942723 4710 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942730 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942738 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942745 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942752 4710 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942759 4710 
reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942766 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942773 4710 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942780 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942790 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942798 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942805 4710 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942813 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942820 4710 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942828 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942836 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942843 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942850 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942856 4710 
reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942864 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942871 4710 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942879 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942886 4710 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942893 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942899 4710 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942906 4710 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942914 4710 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942921 4710 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942929 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942936 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942944 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942951 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942959 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942966 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942973 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942981 4710 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942989 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.942996 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943003 4710 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943011 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943018 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943027 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943034 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943042 4710 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943049 4710 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943055 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943063 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943070 4710 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943077 4710 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943085 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943093 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943100 4710 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943107 4710 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943115 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943123 4710 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943133 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943140 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 
09:04:54.943146 4710 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943154 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943162 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943170 4710 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943177 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943185 4710 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943192 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943199 4710 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943206 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943213 4710 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943221 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943271 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:54 crc kubenswrapper[4710]: I1009 09:04:54.943281 4710 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.005912 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.005943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.005952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.005964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.005973 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.092955 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.098746 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.105183 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 09 09:04:55 crc kubenswrapper[4710]: W1009 09:04:55.111700 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-29749768d1a1623cac38e9fed58e3e645b2dafae231923bfbb8a9d1cadf9b741 WatchSource:0}: Error finding container 29749768d1a1623cac38e9fed58e3e645b2dafae231923bfbb8a9d1cadf9b741: Status 404 returned error can't find the container with id 29749768d1a1623cac38e9fed58e3e645b2dafae231923bfbb8a9d1cadf9b741 Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.112455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.112476 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.112484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.112495 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.112504 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: W1009 09:04:55.117506 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-13e9f9a41fb06e75d717adeb0615d969b91ce2ffef3921c28005170928c11e58 WatchSource:0}: Error finding container 13e9f9a41fb06e75d717adeb0615d969b91ce2ffef3921c28005170928c11e58: Status 404 returned error can't find the container with id 13e9f9a41fb06e75d717adeb0615d969b91ce2ffef3921c28005170928c11e58 Oct 09 09:04:55 crc kubenswrapper[4710]: W1009 09:04:55.118763 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-73f20f2da095ba4dc7acdd17ae3e6325b259a74350053265ec6c2f09e89412ec WatchSource:0}: Error finding container 73f20f2da095ba4dc7acdd17ae3e6325b259a74350053265ec6c2f09e89412ec: Status 404 returned error can't find the container with id 73f20f2da095ba4dc7acdd17ae3e6325b259a74350053265ec6c2f09e89412ec Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.214219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.214250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.214259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.214270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.214278 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.316276 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.316311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.316321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.316333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.316341 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.345980 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.346126 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:04:56.346108903 +0000 UTC m=+19.836217300 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.418603 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.418638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.418648 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.418707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.418717 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.447088 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.447142 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.447162 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.447198 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447273 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447312 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:56.447302055 +0000 UTC m=+19.937410451 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447532 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447546 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447567 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447579 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447589 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:56.447576231 +0000 UTC m=+19.937684629 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447611 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:56.447601559 +0000 UTC m=+19.937709956 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447649 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447659 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447668 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:55 crc kubenswrapper[4710]: E1009 09:04:55.447696 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:56.447687952 +0000 UTC m=+19.937796349 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.521300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.521333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.521341 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.521354 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.521365 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.623319 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.623343 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.623353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.623363 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.623371 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.725717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.725765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.725775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.725787 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.725795 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.828017 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.828056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.828064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.828077 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.828085 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.890854 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.890889 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"29749768d1a1623cac38e9fed58e3e645b2dafae231923bfbb8a9d1cadf9b741"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.892761 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.894770 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.894879 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.895896 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"73f20f2da095ba4dc7acdd17ae3e6325b259a74350053265ec6c2f09e89412ec"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.897045 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.897069 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.897079 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"13e9f9a41fb06e75d717adeb0615d969b91ce2ffef3921c28005170928c11e58"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.903953 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.912956 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09
T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.921824 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929293 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929302 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929313 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929321 4710 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:55Z","lastTransitionTime":"2025-10-09T09:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.929821 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.946133 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.963303 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.975470 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:55 crc kubenswrapper[4710]: I1009 09:04:55.994739 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.004292 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.014578 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.025061 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.031762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.031827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.031837 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.031850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.031858 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.034190 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.043738 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.053196 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.133570 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.133599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.133607 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.133620 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.133629 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.235748 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.235780 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.235790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.235803 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.235812 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.337706 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.337739 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.337748 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.337759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.337769 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.355132 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.355291 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:04:58.355276878 +0000 UTC m=+21.845385275 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.439738 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.439767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.439775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.439787 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.439795 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.456182 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.456213 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.456243 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.456261 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456337 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456349 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456358 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456368 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456389 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:58.456379038 +0000 UTC m=+21.946487435 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456391 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456406 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456468 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:58.456453979 +0000 UTC m=+21.946562386 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456522 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456549 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:58.456541133 +0000 UTC m=+21.946649540 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456547 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.456596 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:04:58.456583203 +0000 UTC m=+21.946691610 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.544216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.544321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.544331 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.544344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.544352 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.646300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.646348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.646358 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.646371 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.646379 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.748633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.748663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.748672 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.748684 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.748692 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.814603 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.814703 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.814738 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.814965 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.815028 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:56 crc kubenswrapper[4710]: E1009 09:04:56.815113 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.817479 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.818048 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.818799 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.819318 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.819816 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.820272 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.820764 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.821213 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.821758 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.822187 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.822636 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.823190 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.823631 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.824064 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.824535 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.824998 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.825486 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.825597 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.825818 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.828719 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.829208 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.829949 4710 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.830534 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.830902 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.831780 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.832156 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.833033 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.833567 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.834156 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.834535 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.835211 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" 
path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.836180 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.836758 4710 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.836889 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.839118 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.839607 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.839986 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.841443 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.841997 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.842377 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.842863 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.843753 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.844322 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.845070 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.845605 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.846482 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.847003 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.847795 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.848258 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.849022 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.849689 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850441 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850499 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850526 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850553 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850843 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.850877 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.851590 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.852040 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.852563 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.853304 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.858795 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.868087 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.878215 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.899399 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.911212 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.919817 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.927997 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.936344 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.944512 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.952301 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.952330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.952341 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.952353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.952363 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:56Z","lastTransitionTime":"2025-10-09T09:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.953166 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:56 crc kubenswrapper[4710]: I1009 09:04:56.960500 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.018042 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.021033 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.023390 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.026931 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.034636 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.042318 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.050452 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.053798 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.053825 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.053835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.053847 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.053856 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.057291 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.065154 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.072687 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.082201 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.090938 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.098707 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.106513 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.113651 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.121755 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.129300 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.136535 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:57Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.155849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.155875 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.155884 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.155895 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.155904 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.257488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.257516 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.257525 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.257534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.257543 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.359358 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.359389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.359398 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.359408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.359416 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.462304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.462355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.462366 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.462378 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.462386 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.563946 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.563981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.563989 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.564002 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.564010 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.665778 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.665815 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.665824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.665837 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.665846 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.768604 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.768638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.768649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.768661 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.768670 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.870908 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.870946 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.870955 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.870970 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.870978 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.972902 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.972936 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.972955 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.972969 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:57 crc kubenswrapper[4710]: I1009 09:04:57.972977 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:57Z","lastTransitionTime":"2025-10-09T09:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:57 crc kubenswrapper[4710]: E1009 09:04:57.977465 4710 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.074324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.074353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.074362 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.074374 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.074383 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.176183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.176209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.176217 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.176237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.176246 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.277651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.277857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.277941 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.278013 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.278068 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.369080 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.369270 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:05:02.36924793 +0000 UTC m=+25.859356327 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.380444 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.380478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.380492 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.380505 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.380514 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.470006 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.470049 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.470077 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.470100 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470192 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470197 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470218 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470243 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470251 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:02.470239702 +0000 UTC m=+25.960348099 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470252 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470283 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:02.470270199 +0000 UTC m=+25.960378596 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470319 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:02.470304433 +0000 UTC m=+25.960412830 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470341 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470369 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470379 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.470406 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:02.470395685 +0000 UTC m=+25.960504082 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.482403 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.482461 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.482471 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.482484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.482493 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.515390 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-5c9mg"] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.515665 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.517859 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-fzkfm"] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.518101 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-zzrnh"] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.518182 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.518282 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: W1009 09:04:58.519595 4710 reflector.go:561] object-"openshift-multus"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.519624 4710 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 09:04:58 crc kubenswrapper[4710]: W1009 09:04:58.520030 4710 reflector.go:561] object-"openshift-multus"/"multus-daemon-config": failed to list *v1.ConfigMap: configmaps "multus-daemon-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-multus": no relationship found between node 'crc' and this object Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.520060 4710 reflector.go:158] "Unhandled Error" err="object-\"openshift-multus\"/\"multus-daemon-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"multus-daemon-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-multus\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.520453 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-l4vw4"] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.520549 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.520675 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.520870 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.520942 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.522588 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.522788 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.522910 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.522923 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.523065 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.523103 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.523149 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.523251 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.523706 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.525242 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.544414 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.562087 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.577728 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.584047 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.584067 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.584075 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.584086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.584094 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.596639 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.618712 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.629650 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.639969 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.651102 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.662240 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672010 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/934ad2b3-3174-4135-be38-73a7f4dd6c23-hosts-file\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672041 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvjrc\" (UniqueName: \"kubernetes.io/projected/421bdfde-a7ad-4e4c-aa0d-624104899b94-kube-api-access-qvjrc\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc 
kubenswrapper[4710]: I1009 09:04:58.672068 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-multus-certs\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672091 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672106 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-bin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672135 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f676b5cb-d273-4cac-85de-23ca7b6151b6-rootfs\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672181 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-system-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672242 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-cni-binary-copy\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672265 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f676b5cb-d273-4cac-85de-23ca7b6151b6-mcd-auth-proxy-config\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672302 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6m7g\" (UniqueName: \"kubernetes.io/projected/934ad2b3-3174-4135-be38-73a7f4dd6c23-kube-api-access-m6m7g\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672318 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-cnibin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " 
pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672344 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-system-cni-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672359 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672387 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tfpk\" (UniqueName: \"kubernetes.io/projected/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-kube-api-access-2tfpk\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672447 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-os-release\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672464 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-etc-kubernetes\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672486 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-kubelet\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672500 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-daemon-config\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672530 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672546 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-hostroot\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672560 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-k8s-cni-cncf-io\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672577 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-netns\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672592 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-os-release\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672613 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-multus\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672626 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-conf-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672642 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f676b5cb-d273-4cac-85de-23ca7b6151b6-proxy-tls\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672657 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-binary-copy\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpqvd\" (UniqueName: \"kubernetes.io/projected/f676b5cb-d273-4cac-85de-23ca7b6151b6-kube-api-access-lpqvd\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672689 4710 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cnibin\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.672702 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-socket-dir-parent\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.679182 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.686538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.686611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.686622 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.686634 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.686643 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.690149 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.699409 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.711681 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.720144 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.728445 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.735370 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.744141 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.752600 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.762161 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.770607 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773078 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773108 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-k8s-cni-cncf-io\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 
09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773126 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-hostroot\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773141 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-netns\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773156 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-os-release\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773169 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-multus\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773183 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-conf-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773196 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f676b5cb-d273-4cac-85de-23ca7b6151b6-proxy-tls\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773209 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-binary-copy\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773223 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-socket-dir-parent\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773245 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpqvd\" (UniqueName: \"kubernetes.io/projected/f676b5cb-d273-4cac-85de-23ca7b6151b6-kube-api-access-lpqvd\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773259 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cnibin\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773275 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/934ad2b3-3174-4135-be38-73a7f4dd6c23-hosts-file\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773288 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvjrc\" (UniqueName: \"kubernetes.io/projected/421bdfde-a7ad-4e4c-aa0d-624104899b94-kube-api-access-qvjrc\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773316 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-multus-certs\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773338 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-system-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773350 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773362 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-bin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773374 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f676b5cb-d273-4cac-85de-23ca7b6151b6-rootfs\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773387 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6m7g\" (UniqueName: \"kubernetes.io/projected/934ad2b3-3174-4135-be38-73a7f4dd6c23-kube-api-access-m6m7g\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773400 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-cnibin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773413 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-cni-binary-copy\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773439 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f676b5cb-d273-4cac-85de-23ca7b6151b6-mcd-auth-proxy-config\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773455 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-system-cni-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773468 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773488 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tfpk\" (UniqueName: \"kubernetes.io/projected/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-kube-api-access-2tfpk\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773500 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-os-release\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773514 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-etc-kubernetes\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773528 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-kubelet\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773542 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: 
\"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-daemon-config\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.773932 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-multus-certs\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774029 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-k8s-cni-cncf-io\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774060 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774141 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-bin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774126 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774241 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-etc-kubernetes\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774267 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-kubelet\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774334 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-hostroot\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774094 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-system-cni-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 
09:04:58.774447 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-run-netns\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774483 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-os-release\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774504 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-host-var-lib-cni-multus\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774518 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-conf-dir\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774657 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/f676b5cb-d273-4cac-85de-23ca7b6151b6-rootfs\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774869 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-cnibin\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.774945 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-system-cni-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775113 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cnibin\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775213 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/934ad2b3-3174-4135-be38-73a7f4dd6c23-hosts-file\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775272 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-socket-dir-parent\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775290 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/421bdfde-a7ad-4e4c-aa0d-624104899b94-os-release\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775776 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-cni-binary-copy\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775861 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f676b5cb-d273-4cac-85de-23ca7b6151b6-mcd-auth-proxy-config\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.775984 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-cni-binary-copy\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.776116 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-tuning-conf-dir\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.778313 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f676b5cb-d273-4cac-85de-23ca7b6151b6-proxy-tls\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.784497 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.788087 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.788107 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.788115 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.788125 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.788133 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.789592 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6m7g\" (UniqueName: \"kubernetes.io/projected/934ad2b3-3174-4135-be38-73a7f4dd6c23-kube-api-access-m6m7g\") pod \"node-resolver-zzrnh\" (UID: \"934ad2b3-3174-4135-be38-73a7f4dd6c23\") " pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.790495 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpqvd\" (UniqueName: \"kubernetes.io/projected/f676b5cb-d273-4cac-85de-23ca7b6151b6-kube-api-access-lpqvd\") pod \"machine-config-daemon-fzkfm\" (UID: \"f676b5cb-d273-4cac-85de-23ca7b6151b6\") " pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.814570 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.814764 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.814867 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.815177 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.814680 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:04:58 crc kubenswrapper[4710]: E1009 09:04:58.815405 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.832397 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.839455 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-zzrnh" Oct 09 09:04:58 crc kubenswrapper[4710]: W1009 09:04:58.840841 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf676b5cb_d273_4cac_85de_23ca7b6151b6.slice/crio-a74b719d2f9536dfd309cdce1ad5cc579acf008c30cbd7c1184a32aa43d41969 WatchSource:0}: Error finding container a74b719d2f9536dfd309cdce1ad5cc579acf008c30cbd7c1184a32aa43d41969: Status 404 returned error can't find the container with id a74b719d2f9536dfd309cdce1ad5cc579acf008c30cbd7c1184a32aa43d41969 Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.891284 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.891316 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.891325 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.891338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.891348 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.892700 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxql9"] Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.893358 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.896795 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.896946 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.897140 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.897237 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.897394 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.897408 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.897530 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.905960 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zzrnh" event={"ID":"934ad2b3-3174-4135-be38-73a7f4dd6c23","Type":"ContainerStarted","Data":"ae178a91e456c9cd4652ada5c7dce1cb60644bb22eaa0f11fed84ec95d9b0598"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.907326 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"a74b719d2f9536dfd309cdce1ad5cc579acf008c30cbd7c1184a32aa43d41969"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.907794 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.916876 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.925046 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.932633 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.941725 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.950082 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.958687 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.968780 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974773 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974803 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974820 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974834 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974850 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974926 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974969 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.974994 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975018 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975031 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975046 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975074 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975100 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 
09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975126 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975140 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975168 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975180 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975195 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vlxv\" (UniqueName: \"kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.975209 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.980240 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.993022 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.993046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.993055 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.993066 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.993074 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:58Z","lastTransitionTime":"2025-10-09T09:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:58 crc kubenswrapper[4710]: I1009 09:04:58.996314 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:58Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.017377 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node 
kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\
\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\
",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 
2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.034390 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"1
92.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.048850 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076489 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 
09:04:59.076522 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076540 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076560 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076573 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076593 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vlxv\" (UniqueName: \"kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076610 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076628 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076640 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076653 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076664 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076679 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076693 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076705 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076718 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076736 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076751 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076772 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076785 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076803 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076834 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076862 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076891 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076911 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076932 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076950 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077179 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077214 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077275 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077386 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077487 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077516 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077517 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077540 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.076847 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077883 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.077915 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.079849 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.090166 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vlxv\" (UniqueName: \"kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv\") pod \"ovnkube-node-mxql9\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.095257 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.095282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.095291 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.095304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.095312 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.196681 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.196716 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.196729 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.196745 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.196754 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.205131 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:04:59 crc kubenswrapper[4710]: W1009 09:04:59.220141 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0aae2f40_061f_4e34_abaa_11bafcd40ef6.slice/crio-87c78efd541350d9d2e5c655a9516e54339a67890de5ef9b93cac19bd3dd8eec WatchSource:0}: Error finding container 87c78efd541350d9d2e5c655a9516e54339a67890de5ef9b93cac19bd3dd8eec: Status 404 returned error can't find the container with id 87c78efd541350d9d2e5c655a9516e54339a67890de5ef9b93cac19bd3dd8eec Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.298078 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.298113 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.298122 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.298134 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.298143 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.399752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.399966 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.399975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.399988 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.399996 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.501818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.501956 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.502037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.502093 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.502149 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.603710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.603735 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.603743 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.603755 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.603763 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.672699 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.674924 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/421bdfde-a7ad-4e4c-aa0d-624104899b94-multus-daemon-config\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.687770 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.693049 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tfpk\" (UniqueName: \"kubernetes.io/projected/b134fc1d-9d0f-4ebf-a188-92bb8bfdd014-kube-api-access-2tfpk\") pod \"multus-additional-cni-plugins-l4vw4\" (UID: \"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\") " pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.699073 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvjrc\" (UniqueName: \"kubernetes.io/projected/421bdfde-a7ad-4e4c-aa0d-624104899b94-kube-api-access-qvjrc\") pod \"multus-5c9mg\" (UID: \"421bdfde-a7ad-4e4c-aa0d-624104899b94\") " pod="openshift-multus/multus-5c9mg" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.705350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.705372 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.705379 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.705391 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.705400 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.726199 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-5c9mg" Oct 09 09:04:59 crc kubenswrapper[4710]: W1009 09:04:59.735269 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod421bdfde_a7ad_4e4c_aa0d_624104899b94.slice/crio-d85f298a7ab634bd3330df829b4588d404d733f6fc7644698027da1d91b3420d WatchSource:0}: Error finding container d85f298a7ab634bd3330df829b4588d404d733f6fc7644698027da1d91b3420d: Status 404 returned error can't find the container with id d85f298a7ab634bd3330df829b4588d404d733f6fc7644698027da1d91b3420d Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.743994 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" Oct 09 09:04:59 crc kubenswrapper[4710]: W1009 09:04:59.753028 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb134fc1d_9d0f_4ebf_a188_92bb8bfdd014.slice/crio-d28e864f2a85c6990d87934edb53168ee4d051e07a405b746e044091010fb6f2 WatchSource:0}: Error finding container d28e864f2a85c6990d87934edb53168ee4d051e07a405b746e044091010fb6f2: Status 404 returned error can't find the container with id d28e864f2a85c6990d87934edb53168ee4d051e07a405b746e044091010fb6f2 Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.807132 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.807167 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.807177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.807189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.807198 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.909640 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.909667 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.909676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.909687 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.909694 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:04:59Z","lastTransitionTime":"2025-10-09T09:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.912037 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.912065 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.913565 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerStarted","Data":"d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.913588 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerStarted","Data":"d28e864f2a85c6990d87934edb53168ee4d051e07a405b746e044091010fb6f2"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.914953 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerStarted","Data":"c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.914975 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerStarted","Data":"d85f298a7ab634bd3330df829b4588d404d733f6fc7644698027da1d91b3420d"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.916532 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" exitCode=0 Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.916655 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.916708 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"87c78efd541350d9d2e5c655a9516e54339a67890de5ef9b93cac19bd3dd8eec"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.919402 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-zzrnh" event={"ID":"934ad2b3-3174-4135-be38-73a7f4dd6c23","Type":"ContainerStarted","Data":"06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b"} Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.923101 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.941466 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha25
6:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.950755 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.959169 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.967916 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.977094 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:04:59 crc kubenswrapper[4710]: I1009 09:04:59.995785 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:04:59Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.006467 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.011998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.012030 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.012039 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.012051 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.012059 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.017307 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.026297 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.034447 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.042708 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.054486 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: 
I1009 09:05:00.064416 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.077000 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.085570 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.093968 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.103699 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.113144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.113174 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.113184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.113199 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.113208 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.118835 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.127554 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.136196 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.145797 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.156813 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.167586 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.175315 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.185176 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.215206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.215245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.215254 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.215268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.215277 4710 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.316996 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.317028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.317037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.317053 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.317062 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.418741 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.418775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.418783 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.418795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.418804 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.520687 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.520721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.520729 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.520741 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.520751 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.622604 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.622647 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.622656 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.622668 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.622677 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.724925 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.724962 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.724972 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.724986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.724994 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.814172 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.814218 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:00 crc kubenswrapper[4710]: E1009 09:05:00.814282 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:00 crc kubenswrapper[4710]: E1009 09:05:00.814368 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.814836 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:00 crc kubenswrapper[4710]: E1009 09:05:00.814909 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.826642 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.826683 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.826692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.826706 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.826714 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.923454 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941" exitCode=0 Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.923515 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.926963 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.926999 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927009 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927017 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927025 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927032 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927947 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927956 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927970 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.927978 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:00Z","lastTransitionTime":"2025-10-09T09:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.935347 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.948400 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.961771 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z 
is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.971397 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:00 crc kubenswrapper[4710]: I1009 09:05:00.988648 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:00Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc 
kubenswrapper[4710]: I1009 09:05:01.007578 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.018660 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.028187 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.033725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.033757 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.033767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.033780 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.033788 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.037112 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.050806 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-hrfgb"] Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.051080 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.052784 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.053011 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.054780 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.056264 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.058099 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.070277 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.079522 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.088421 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.097593 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.112021 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z 
is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.120163 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.129913 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc 
kubenswrapper[4710]: I1009 09:05:01.137490 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.137602 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.137686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.137864 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.138017 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.142640 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",
\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.150918 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.158720 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.165290 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.173082 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.181093 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.189363 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.195701 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.204864 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.207670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/067eda20-53ab-400e-abb3-eb6184f0f60d-serviceca\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.207813 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfnl7\" (UniqueName: \"kubernetes.io/projected/067eda20-53ab-400e-abb3-eb6184f0f60d-kube-api-access-sfnl7\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.207940 
4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/067eda20-53ab-400e-abb3-eb6184f0f60d-host\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.212896 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.240511 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.240587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.240639 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.240691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.240737 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.308740 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/067eda20-53ab-400e-abb3-eb6184f0f60d-serviceca\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.308785 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfnl7\" (UniqueName: \"kubernetes.io/projected/067eda20-53ab-400e-abb3-eb6184f0f60d-kube-api-access-sfnl7\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.308802 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/067eda20-53ab-400e-abb3-eb6184f0f60d-host\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.308855 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/067eda20-53ab-400e-abb3-eb6184f0f60d-host\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.309563 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/067eda20-53ab-400e-abb3-eb6184f0f60d-serviceca\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.324357 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfnl7\" (UniqueName: \"kubernetes.io/projected/067eda20-53ab-400e-abb3-eb6184f0f60d-kube-api-access-sfnl7\") pod \"node-ca-hrfgb\" (UID: \"067eda20-53ab-400e-abb3-eb6184f0f60d\") " pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.342202 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.342321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.342398 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.342504 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.342604 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.362504 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hrfgb" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.444534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.444728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.444737 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.444750 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.444759 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.546987 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.547007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.547014 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.547025 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.547032 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.649155 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.649179 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.649187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.649196 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.649204 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.753960 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.753983 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.753990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.754000 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.754009 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.856249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.856280 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.856289 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.856302 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.856314 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.930741 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4" exitCode=0 Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.930804 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.931960 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hrfgb" event={"ID":"067eda20-53ab-400e-abb3-eb6184f0f60d","Type":"ContainerStarted","Data":"10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.931984 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hrfgb" event={"ID":"067eda20-53ab-400e-abb3-eb6184f0f60d","Type":"ContainerStarted","Data":"50c28cfd7b4fa00b708d7a812dad60588a2e3c3a0a92c85aa8f04c4dd953b6dd"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.940551 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.950163 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.958274 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.958305 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.958315 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.958326 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.958335 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:01Z","lastTransitionTime":"2025-10-09T09:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.960027 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.967750 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.974509 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.984419 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:01 crc kubenswrapper[4710]: I1009 09:05:01.992752 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.000462 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:01Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.007115 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.015586 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.024590 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.036392 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContain
erStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.045995 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.055208 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.059959 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.059983 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.059991 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.060002 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.060010 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.064556 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.073759 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.082401 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.089812 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.098574 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.107219 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.119046 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContain
erStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.127549 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.136490 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.144005 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.151474 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.159382 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.161527 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.161551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.161561 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.161573 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.161581 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.166534 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.172761 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.263559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.263590 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.263600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.263614 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.263625 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.365141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.365169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.365177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.365190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.365198 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.417291 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.417462 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:05:10.417417679 +0000 UTC m=+33.907526077 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.466657 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.466695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.466705 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.466720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.466728 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.518372 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.518411 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.518453 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.518472 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518532 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518537 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518571 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:10.518560496 +0000 UTC m=+34.008668893 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518599 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:10.518584912 +0000 UTC m=+34.008693309 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518656 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518667 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518676 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518699 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:10.518692014 +0000 UTC m=+34.008800411 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518738 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518746 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518752 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.518768 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:10.518763348 +0000 UTC m=+34.008871745 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.569065 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.569095 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.569104 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.569116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.569125 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.671006 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.671037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.671046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.671058 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.671066 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.773267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.773304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.773312 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.773324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.773334 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.814741 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.814777 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.814779 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.814832 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.814955 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:02 crc kubenswrapper[4710]: E1009 09:05:02.815032 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.874614 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.874648 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.874658 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.874669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.874678 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.936987 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.938633 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3" exitCode=0 Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.938659 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.949389 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.959141 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.971250 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z 
is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.976445 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.976467 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.976477 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.976488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.976496 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:02Z","lastTransitionTime":"2025-10-09T09:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.980515 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.990166 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:02 crc kubenswrapper[4710]: I1009 09:05:02.998656 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:02Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.007625 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.014919 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.023094 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.030305 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.039163 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.050187 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.058054 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.065515 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.077756 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.077774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.077782 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.077795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.077803 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.179557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.179703 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.179712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.179723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.179731 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.283073 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.283114 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.283125 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.283136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.283144 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.385024 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.385044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.385052 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.385062 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.385069 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.488176 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.488202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.488209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.488219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.488261 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.590208 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.590256 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.590265 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.590277 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.590285 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.692659 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.692704 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.692712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.692723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.692730 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.794027 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.794048 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.794056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.794066 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.794074 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.895543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.895568 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.895576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.895585 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.895594 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.942794 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c" exitCode=0 Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.942827 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c"} Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.950965 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.962007 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.970520 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.979348 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.986890 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.994205 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.997641 4710 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.997665 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.997674 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.997686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:03 crc kubenswrapper[4710]: I1009 09:05:03.997693 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:03Z","lastTransitionTime":"2025-10-09T09:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.000002 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:03Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.008188 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.015666 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.022866 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.029050 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.036398 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.044171 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.056080 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContain
erStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.099168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.099199 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.099207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.099218 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.099237 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.201666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.201695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.201703 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.201722 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.201730 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.303715 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.303741 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.303749 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.303764 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.303773 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.375874 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.375900 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.375907 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.375917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.375924 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.383899 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.386142 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.386168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.386177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.386187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.386195 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.396203 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.398535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.398564 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.398573 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.398582 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.398591 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.406359 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.408367 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.408501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.408586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.408651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.408716 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.416030 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.418009 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.418099 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.418158 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.418213 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.418277 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.425678 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.425814 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.426828 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.426853 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.426861 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.426870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.426878 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.529423 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.529613 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.529633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.529646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.529655 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.630963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.630995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.631003 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.631016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.631025 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.732895 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.732923 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.732931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.732942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.732951 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.813918 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.813953 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.813959 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.814018 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.814126 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:04 crc kubenswrapper[4710]: E1009 09:05:04.814194 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.834072 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.834094 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.834101 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.834113 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.834121 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.935551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.936116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.936190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.936307 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.936364 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:04Z","lastTransitionTime":"2025-10-09T09:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.947800 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f" exitCode=0 Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.947852 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.952059 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99"} Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.952513 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.963088 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"m
ountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.968002 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.973557 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.981979 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.989827 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:04 crc kubenswrapper[4710]: I1009 09:05:04.997390 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:04Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.004493 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.012385 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.020704 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.028630 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.035956 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.038529 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.038556 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.038565 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.038576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.038583 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.042539 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.049999 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.058975 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.074207 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContain
erStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.082134 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.091354 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.099619 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.108765 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.117631 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.129831 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8f
b1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.137546 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.140034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.140061 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.140070 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.140082 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.140090 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.145441 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.154850 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.161986 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.168594 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.176019 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.183682 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.190786 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.242257 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.242286 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.242295 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.242307 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.242314 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.344185 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.344215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.344237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.344250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.344259 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.446312 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.446345 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.446353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.446365 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.446373 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.548258 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.548288 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.548296 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.548306 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.548315 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.649753 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.649782 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.649790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.649801 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.649810 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.751472 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.751503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.751511 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.751520 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.751527 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.852614 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.852638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.852646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.852656 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.852665 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.953918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.953955 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.953964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.953977 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.953987 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:05Z","lastTransitionTime":"2025-10-09T09:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.956775 4710 generic.go:334] "Generic (PLEG): container finished" podID="b134fc1d-9d0f-4ebf-a188-92bb8bfdd014" containerID="7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d" exitCode=0 Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.957396 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerDied","Data":"7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d"} Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.957443 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.957730 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:05 crc kubenswrapper[4710]: I1009 09:05:05.993364 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8f
b1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:05Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.006358 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.023322 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.042038 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.043076 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes
.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.056166 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.056194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.056202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.056215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.056233 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.058919 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba912289
2c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.066558 4710 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.075525 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.085524 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.093930 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.101339 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.112123 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.123224 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.131683 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.138466 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.147396 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.155721 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.157624 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.157650 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.157665 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.157678 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.157686 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.165242 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.172580 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.185257 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath
\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.204048 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.217280 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.227643 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.237819 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.245272 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.253685 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.259688 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.259717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.259724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.259735 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.259743 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.262623 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.270092 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.277571 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.362503 4710 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.362525 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.362533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.362546 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.362555 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.464869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.464907 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.464917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.464929 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.464937 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.566501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.566535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.566544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.566557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.566566 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.668262 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.668294 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.668303 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.668315 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.668324 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.770534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.770563 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.770572 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.770583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.770591 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.814769 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.814799 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:06 crc kubenswrapper[4710]: E1009 09:05:06.814863 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.814953 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:06 crc kubenswrapper[4710]: E1009 09:05:06.815058 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:06 crc kubenswrapper[4710]: E1009 09:05:06.815091 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.824080 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.833769 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.842050 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.850393 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.858165 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.866149 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.873639 4710 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.873665 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.873679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.873692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.873705 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.879586 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.889534 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.905669 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.918101 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.924670 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.932790 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.940636 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.952512 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-
openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.960475 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/0.log" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.962807 4710 
generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99" exitCode=1 Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.962891 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.963384 4710 scope.go:117] "RemoveContainer" containerID="5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.966562 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" event={"ID":"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014","Type":"ContainerStarted","Data":"5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.970205 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.975624 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.975801 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.975809 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.975820 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.975829 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:06Z","lastTransitionTime":"2025-10-09T09:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.978717 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.987472 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:06 crc kubenswrapper[4710]: I1009 09:05:06.995298 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.005022 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.017459 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8f
b1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"message\\\":\\\"lector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452016 5914 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452205 5914 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452357 5914 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452423 5914 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452546 5914 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452780 5914 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 09:05:06.452791 5914 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 09:05:06.452800 5914 factory.go:656] Stopping watch factory\\\\nI1009 09:05:06.452845 5914 ovnkube.go:599] Stopped ovnkube\\\\nI1009 09:05:06.452825 5914 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 
09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.026328 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.034811 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.044067 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c
919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.051994 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.058412 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.068933 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.077765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.077795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.077841 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.077857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.077866 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.079845 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.088631 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.097875 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.106178 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.114364 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.121064 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.133216 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"message\\\":\\\"lector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452016 5914 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452205 5914 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452357 5914 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452423 5914 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452546 5914 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452780 5914 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 09:05:06.452791 5914 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 09:05:06.452800 5914 factory.go:656] Stopping watch factory\\\\nI1009 09:05:06.452845 5914 ovnkube.go:599] Stopped ovnkube\\\\nI1009 09:05:06.452825 5914 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 
09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.143844 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.153466 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.162237 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.172343 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.179586 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.180200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.180248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.180259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.180272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 
09:05:07.180281 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.192968 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.206109 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.218221 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.225397 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.282301 4710 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.282338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.282347 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.282361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.282385 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.383788 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.383805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.383812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.383822 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.383830 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.485786 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.485824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.485833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.485845 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.485853 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.587669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.587698 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.587707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.587720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.587728 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.690194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.690248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.690257 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.690269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.690279 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.791928 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.791958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.791968 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.791980 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.791988 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.894380 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.894408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.894417 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.894452 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.894460 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.970534 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/1.log" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.971222 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/0.log" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.973058 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3" exitCode=1 Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.973088 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3"} Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.973115 4710 scope.go:117] "RemoveContainer" containerID="5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.973559 4710 scope.go:117] "RemoveContainer" containerID="f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3" Oct 09 09:05:07 crc kubenswrapper[4710]: E1009 09:05:07.973679 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.983523 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.993552 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:07Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.995359 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.995382 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.995390 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.995400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:07 crc kubenswrapper[4710]: I1009 09:05:07.995408 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:07Z","lastTransitionTime":"2025-10-09T09:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.001851 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.009563 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.016691 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.023664 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.029732 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.038848 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.046781 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.054596 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.061090 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.070031 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.078491 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.089875 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d3eb9872ee91a8954cbbe555570d694813b1e8fb1c3652d0c1a5a8fb97a3c99\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"message\\\":\\\"lector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452016 5914 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452205 5914 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452357 5914 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452423 5914 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1009 09:05:06.452546 5914 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1009 09:05:06.452780 5914 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1009 09:05:06.452791 5914 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1009 09:05:06.452800 5914 factory.go:656] Stopping watch factory\\\\nI1009 09:05:06.452845 5914 ovnkube.go:599] Stopped ovnkube\\\\nI1009 09:05:06.452825 5914 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1009 
09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.098979 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.099015 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.099026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.099117 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.099145 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.200594 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.200627 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.200637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.200650 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.200658 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.301973 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.302175 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.302274 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.302352 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.302417 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.403770 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.403887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.403956 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.404019 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.404077 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.505789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.505931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.505993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.506055 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.506112 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.608281 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.608308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.608315 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.608324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.608332 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.709899 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.710001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.710065 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.710126 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.710183 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.811892 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.811923 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.811933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.811945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.811954 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.814143 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:08 crc kubenswrapper[4710]: E1009 09:05:08.814244 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.814344 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.814401 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:08 crc kubenswrapper[4710]: E1009 09:05:08.814495 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:08 crc kubenswrapper[4710]: E1009 09:05:08.814565 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.913346 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.913562 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.913621 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.913686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.913740 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:08Z","lastTransitionTime":"2025-10-09T09:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.976348 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/1.log" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.978624 4710 scope.go:117] "RemoveContainer" containerID="f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3" Oct 09 09:05:08 crc kubenswrapper[4710]: E1009 09:05:08.978736 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.987038 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:08 crc kubenswrapper[4710]: I1009 09:05:08.998591 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece3
44c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:08Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.006259 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.014941 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.015535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.015629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc 
kubenswrapper[4710]: I1009 09:05:09.015699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.015764 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.015815 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.022717 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"n
ame\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.029372 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" 
Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.035770 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.041940 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.049261 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.056577 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.063950 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.070093 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.078345 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.085841 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:09Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.117974 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.117996 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.118003 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.118016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.118024 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.220167 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.220200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.220209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.220220 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.220238 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.321759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.321784 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.321794 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.321806 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.321815 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.423489 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.423524 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.423534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.423547 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.423555 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.524944 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.524972 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.524980 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.524990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.524998 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.626413 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.626458 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.626468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.626480 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.626488 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.728172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.728220 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.728238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.728248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.728255 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.830144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.830170 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.830177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.830187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.830194 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.932118 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.932143 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.932150 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.932159 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:09 crc kubenswrapper[4710]: I1009 09:05:09.932167 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:09Z","lastTransitionTime":"2025-10-09T09:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.034216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.034259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.034268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.034281 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.034291 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.111775 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.121472 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\
\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.131650 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.136083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.136114 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc 
kubenswrapper[4710]: I1009 09:05:10.136123 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.136134 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.136145 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.139681 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.147398 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.154007 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.165982 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.174461 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.183147 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.191185 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.197271 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.205391 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.209948 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j"] Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.210296 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.211752 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.212449 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.215158 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.231518 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.237998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.238026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.238035 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.238048 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.238056 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.242164 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.252302 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.261666 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.269329 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.275828 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.283008 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.293712 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.301975 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.314187 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha
256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.324252 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.333490 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.340154 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.340207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.340219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.340250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.340263 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.344560 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.352027 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.359375 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.366248 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.372340 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.379749 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-459hs\" (UniqueName: \"kubernetes.io/projected/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-kube-api-access-459hs\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.379788 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc 
kubenswrapper[4710]: I1009 09:05:10.379916 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.380062 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.442334 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.442388 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.442400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.442417 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.442457 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.480721 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.480826 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-459hs\" (UniqueName: \"kubernetes.io/projected/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-kube-api-access-459hs\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.480885 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:05:26.480854954 +0000 UTC m=+49.970963352 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.480975 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.481046 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.481146 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.481645 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-env-overrides\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.482008 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.486099 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.493842 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-459hs\" (UniqueName: \"kubernetes.io/projected/eaca9555-f8a0-49e8-a266-5f2700cbf9e5-kube-api-access-459hs\") pod \"ovnkube-control-plane-749d76644c-b9p6j\" (UID: \"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.521003 4710 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" Oct 09 09:05:10 crc kubenswrapper[4710]: W1009 09:05:10.529523 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeaca9555_f8a0_49e8_a266_5f2700cbf9e5.slice/crio-a3bd1c3a293901fb1f19d7e49e266a57710752c0f5c51327cd2f170a577db415 WatchSource:0}: Error finding container a3bd1c3a293901fb1f19d7e49e266a57710752c0f5c51327cd2f170a577db415: Status 404 returned error can't find the container with id a3bd1c3a293901fb1f19d7e49e266a57710752c0f5c51327cd2f170a577db415 Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.545063 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.545118 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.545128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.545144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.545154 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.582597 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.582640 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.582675 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.582702 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582857 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582887 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582902 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582953 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582974 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582988 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 
09:05:10.582958 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:26.582944114 +0000 UTC m=+50.073052511 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.583049 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:26.583032251 +0000 UTC m=+50.073140647 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.582870 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.583078 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:26.583072155 +0000 UTC m=+50.073180552 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.583124 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.583144 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:26.583138951 +0000 UTC m=+50.073247348 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.647711 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.647759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.647775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.647795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.647806 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.749915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.749956 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.749967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.749984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.749993 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.814315 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.814334 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.814468 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.814544 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.814483 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:10 crc kubenswrapper[4710]: E1009 09:05:10.814649 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.851495 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.851533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.851543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.851557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.851567 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.953393 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.953664 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.953673 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.953689 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.953698 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:10Z","lastTransitionTime":"2025-10-09T09:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.985664 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" event={"ID":"eaca9555-f8a0-49e8-a266-5f2700cbf9e5","Type":"ContainerStarted","Data":"3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.985706 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" event={"ID":"eaca9555-f8a0-49e8-a266-5f2700cbf9e5","Type":"ContainerStarted","Data":"431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.985719 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" event={"ID":"eaca9555-f8a0-49e8-a266-5f2700cbf9e5","Type":"ContainerStarted","Data":"a3bd1c3a293901fb1f19d7e49e266a57710752c0f5c51327cd2f170a577db415"} Oct 09 09:05:10 crc kubenswrapper[4710]: I1009 09:05:10.996836 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:10Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.006757 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.015703 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.025184 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.034137 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.041706 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.049420 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.055283 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.055313 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.055321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.055333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.055342 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.058152 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.067122 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.075830 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.083721 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.091318 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.104772 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.117339 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.127291 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.158056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.158087 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.158098 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.158111 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.158120 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.260357 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.260395 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.260407 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.260424 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.260454 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.363111 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.363148 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.363157 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.363169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.363178 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.464775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.464838 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.464849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.464875 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.464891 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.567529 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.567569 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.567578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.567595 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.567606 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.649151 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-p9sh6"] Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.649689 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: E1009 09:05:11.649764 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.660546 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669601 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669751 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669760 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669777 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.669787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.677298 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.686469 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.690758 4710 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.690807 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2bc2\" (UniqueName: \"kubernetes.io/projected/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-kube-api-access-x2bc2\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.695135 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.704461 4710 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8
972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.712786 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.722845 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.732197 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.740218 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.748206 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.758533 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.772449 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.772533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.772549 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.772569 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.772583 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.773548 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece3
44c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.784091 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.791627 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.791668 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2bc2\" (UniqueName: \"kubernetes.io/projected/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-kube-api-access-x2bc2\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: E1009 09:05:11.791880 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:11 crc kubenswrapper[4710]: E1009 09:05:11.791989 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:12.291960428 +0000 UTC m=+35.782068824 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.794891 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"n
ame\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.806698 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T0
9:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2
de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:11Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.810698 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2bc2\" (UniqueName: \"kubernetes.io/projected/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-kube-api-access-x2bc2\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.874949 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.874998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.875007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.875021 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.875030 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.977271 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.977298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.977307 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.977323 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:11 crc kubenswrapper[4710]: I1009 09:05:11.977335 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:11Z","lastTransitionTime":"2025-10-09T09:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.079819 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.080166 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.080237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.080304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.080370 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.183726 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.184165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.184284 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.184365 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.184461 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.286682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.286714 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.286723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.286739 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.286748 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.296326 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.296490 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.296544 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:13.296530403 +0000 UTC m=+36.786638800 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.388629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.388668 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.388679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.388692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.388701 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.490759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.490795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.490805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.490818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.490829 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.593746 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.593869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.593983 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.594053 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.594112 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.696413 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.696503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.696517 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.696540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.696555 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.799161 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.799353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.799444 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.799516 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.799575 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.814551 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.814563 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.814593 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.814566 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.814649 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.814772 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.814824 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:12 crc kubenswrapper[4710]: E1009 09:05:12.814889 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.901804 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.901871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.901883 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.901910 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:12 crc kubenswrapper[4710]: I1009 09:05:12.901923 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:12Z","lastTransitionTime":"2025-10-09T09:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.004180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.004221 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.004241 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.004254 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.004264 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.106289 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.106350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.106360 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.106385 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.106401 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.208895 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.208933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.208943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.208958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.208967 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.305881 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:13 crc kubenswrapper[4710]: E1009 09:05:13.306029 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:13 crc kubenswrapper[4710]: E1009 09:05:13.306095 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:15.306078705 +0000 UTC m=+38.796187112 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.311488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.311526 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.311540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.311559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.311569 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.414073 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.414124 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.414136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.414149 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.414166 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.516538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.516604 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.516618 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.516642 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.516660 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.619126 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.619215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.619235 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.619250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.619259 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.721926 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.721966 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.721974 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.721988 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.721999 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.823865 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.823897 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.823905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.823916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.823923 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.926274 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.926309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.926318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.926335 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:13 crc kubenswrapper[4710]: I1009 09:05:13.926347 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:13Z","lastTransitionTime":"2025-10-09T09:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.028919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.028984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.028995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.029020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.029035 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.130353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.130394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.130406 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.130422 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.130448 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.232410 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.232501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.232515 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.232535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.232546 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.334425 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.334514 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.334527 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.334541 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.334551 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.436455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.436488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.436497 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.436514 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.436525 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.537852 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.537896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.537905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.537916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.537926 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.614057 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.614085 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.614094 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.614108 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.614115 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.623788 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:14Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.626639 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.626666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.626675 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.626685 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.626693 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.636278 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:14Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.639341 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.639370 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.639380 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.639389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.639398 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.648427 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:14Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.651116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.651139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.651149 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.651164 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.651174 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.660015 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:14Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.662261 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.662287 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.662299 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.662310 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.662319 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.672521 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:14Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.672622 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.673770 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.673802 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.673812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.673824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.673833 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.776012 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.776043 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.776069 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.776088 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.776099 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.814569 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.814593 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.814609 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.814699 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.814738 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.814807 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.814876 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:14 crc kubenswrapper[4710]: E1009 09:05:14.814946 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.878151 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.878197 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.878207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.878219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.878243 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.979950 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.979998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.980016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.980035 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:14 crc kubenswrapper[4710]: I1009 09:05:14.980050 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:14Z","lastTransitionTime":"2025-10-09T09:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.082003 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.082031 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.082039 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.082050 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.082057 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.183534 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.183561 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.183570 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.183583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.183591 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.285457 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.285692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.285769 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.285831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.285896 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.324092 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:15 crc kubenswrapper[4710]: E1009 09:05:15.324195 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:15 crc kubenswrapper[4710]: E1009 09:05:15.324268 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:19.324249713 +0000 UTC m=+42.814358110 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.387788 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.387835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.387844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.387856 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.387867 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.489611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.489638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.489649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.489662 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.489670 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.591860 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.591963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.592029 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.592102 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.592170 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.693353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.693483 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.693563 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.693641 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.693704 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.795854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.795890 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.795900 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.795912 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.795921 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.898038 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.898077 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.898086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.898100 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.898108 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.999875 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.999911 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.999920 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:15 crc kubenswrapper[4710]: I1009 09:05:15.999969 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:15.999982 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:15Z","lastTransitionTime":"2025-10-09T09:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.102038 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.102073 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.102083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.102097 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.102106 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.204010 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.204041 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.204049 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.204060 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.204070 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.305646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.305676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.305687 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.305699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.305709 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.407619 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.407765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.407851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.407931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.408004 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.510112 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.510145 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.510155 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.510167 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.510176 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.612116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.612151 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.612162 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.612174 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.612182 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.714267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.714300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.714308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.714320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.714330 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.814271 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.814273 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.814526 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:16 crc kubenswrapper[4710]: E1009 09:05:16.814679 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.814740 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:16 crc kubenswrapper[4710]: E1009 09:05:16.814852 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:16 crc kubenswrapper[4710]: E1009 09:05:16.815071 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:16 crc kubenswrapper[4710]: E1009 09:05:16.815215 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.815827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.815853 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.815862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.815874 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.815882 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.825351 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\
"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.834545 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.842199 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.849605 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.856322 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.862398 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.873324 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.882705 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.890398 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.896927 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.903700 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.912158 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.920588 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.920614 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.920622 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.920634 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.920642 4710 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:16Z","lastTransitionTime":"2025-10-09T09:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.923789 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.935900 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.944239 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:16 crc kubenswrapper[4710]: I1009 09:05:16.951949 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.022187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.022349 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.022417 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.022519 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.022574 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.123924 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.124021 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.124084 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.124147 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.124214 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.226611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.226906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.226978 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.227043 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.227101 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.328745 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.328777 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.328785 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.328799 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.328808 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.430206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.430245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.430253 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.430264 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.430273 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.531795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.531832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.531840 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.531854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.531864 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.633317 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.633387 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.633398 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.633412 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.633422 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.735309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.735337 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.735345 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.735356 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.735366 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.836904 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.836928 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.836935 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.836945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.836953 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.938310 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.938335 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.938343 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.938352 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:17 crc kubenswrapper[4710]: I1009 09:05:17.938359 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:17Z","lastTransitionTime":"2025-10-09T09:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.039918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.039945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.039952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.039961 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.039968 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.141611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.141646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.141654 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.141666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.141675 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.243736 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.243765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.243775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.243787 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.243797 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.345887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.345919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.345929 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.345953 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.345962 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.447489 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.447520 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.447528 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.447540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.447548 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.549044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.549071 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.549081 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.549092 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.549101 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.651142 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.651182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.651195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.651214 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.651238 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.753210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.753249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.753258 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.753269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.753277 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.814870 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.814909 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:18 crc kubenswrapper[4710]: E1009 09:05:18.814970 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.815026 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.815028 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:18 crc kubenswrapper[4710]: E1009 09:05:18.815095 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:18 crc kubenswrapper[4710]: E1009 09:05:18.815151 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:18 crc kubenswrapper[4710]: E1009 09:05:18.815201 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.855711 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.855737 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.855744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.855755 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.855763 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.956967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.957202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.957210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.957229 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:18 crc kubenswrapper[4710]: I1009 09:05:18.957238 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:18Z","lastTransitionTime":"2025-10-09T09:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.058931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.058955 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.058963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.058976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.058983 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.161209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.161248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.161257 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.161268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.161277 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.263340 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.263532 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.263543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.263557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.263566 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.354135 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:19 crc kubenswrapper[4710]: E1009 09:05:19.354270 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:19 crc kubenswrapper[4710]: E1009 09:05:19.354347 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:27.354330456 +0000 UTC m=+50.844438854 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.365034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.365085 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.365095 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.365108 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.365118 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.466827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.466860 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.466871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.466884 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.466892 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.568773 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.568827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.568836 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.568850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.568859 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.670121 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.670139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.670147 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.670157 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.670165 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.771667 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.771714 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.771724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.771735 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.771743 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.873752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.873781 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.873790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.873803 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.873811 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.975947 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.975986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.975994 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.976007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:19 crc kubenswrapper[4710]: I1009 09:05:19.976017 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:19Z","lastTransitionTime":"2025-10-09T09:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.077845 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.077993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.078062 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.078136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.078208 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.179844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.179883 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.179894 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.179905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.179912 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.281280 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.281305 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.281313 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.281324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.281332 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.382788 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.382831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.382839 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.382850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.382859 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.484140 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.484186 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.484194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.484206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.484215 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.585846 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.585870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.585877 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.585887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.585895 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.687453 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.687483 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.687493 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.687503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.687510 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.789847 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.789963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.790037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.790120 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.790186 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.814163 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.814181 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.814210 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:20 crc kubenswrapper[4710]: E1009 09:05:20.814277 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.814289 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:20 crc kubenswrapper[4710]: E1009 09:05:20.814375 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:20 crc kubenswrapper[4710]: E1009 09:05:20.814448 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:20 crc kubenswrapper[4710]: E1009 09:05:20.814489 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.895418 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.895461 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.895469 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.895480 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.895488 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.997470 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.997499 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.997523 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.997536 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:20 crc kubenswrapper[4710]: I1009 09:05:20.997545 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:20Z","lastTransitionTime":"2025-10-09T09:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.099643 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.099671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.099680 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.099691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.099700 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.201699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.201731 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.201741 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.201750 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.201758 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.303523 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.303544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.303552 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.303562 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.303570 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.405704 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.405738 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.405746 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.405757 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.405768 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.507326 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.507355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.507363 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.507377 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.507385 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.609086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.609114 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.609123 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.609135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.609143 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.710774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.710810 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.710818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.710833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.710841 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.813096 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.813296 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.813394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.813484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.813546 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.915585 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.915826 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.915906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.915972 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:21 crc kubenswrapper[4710]: I1009 09:05:21.916042 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:21Z","lastTransitionTime":"2025-10-09T09:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.017445 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.017478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.017486 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.017499 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.017506 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.118933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.118959 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.118968 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.118980 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.118988 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.220788 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.220817 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.220841 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.220854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.220862 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.323129 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.323160 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.323168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.323182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.323190 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.424501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.424533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.424541 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.424555 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.424562 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.526037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.526075 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.526086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.526103 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.526115 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.627824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.627851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.627859 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.627870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.627878 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.729699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.729732 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.729740 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.729751 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.729760 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.813862 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.813920 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:22 crc kubenswrapper[4710]: E1009 09:05:22.813952 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:22 crc kubenswrapper[4710]: E1009 09:05:22.814019 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.814077 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:22 crc kubenswrapper[4710]: E1009 09:05:22.814122 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.814209 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:22 crc kubenswrapper[4710]: E1009 09:05:22.814519 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.814680 4710 scope.go:117] "RemoveContainer" containerID="f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.831785 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.831812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.831824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.831834 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.831843 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.933263 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.933299 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.933314 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.933334 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:22 crc kubenswrapper[4710]: I1009 09:05:22.933344 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:22Z","lastTransitionTime":"2025-10-09T09:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.020401 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/1.log" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.022572 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.022884 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.033902 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.038270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.038309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.038318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.038329 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.038344 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.049728 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.064384 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.074573 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.081969 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.093937 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.103871 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.111985 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.118395 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.125939 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.132896 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.140621 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.140651 4710 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.140659 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.140671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.140679 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.141372 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.149939 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.161898 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61
d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.170272 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.179410 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:23Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.242067 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.242103 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.242113 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.242128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.242138 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.344078 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.344659 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.344728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.344792 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.344861 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.446905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.446942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.446951 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.446966 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.446975 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.548817 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.548850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.548858 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.548869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.548877 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.650887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.650909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.650971 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.650986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.651020 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.753088 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.753109 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.753117 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.753126 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.753151 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.855217 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.855303 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.855317 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.855344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.855356 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.956881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.956908 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.956918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.956928 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:23 crc kubenswrapper[4710]: I1009 09:05:23.956952 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:23Z","lastTransitionTime":"2025-10-09T09:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.025566 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/2.log" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.026031 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/1.log" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.027985 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" exitCode=1 Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.028012 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.028037 4710 scope.go:117] "RemoveContainer" containerID="f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.028489 4710 scope.go:117] "RemoveContainer" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" Oct 09 09:05:24 crc kubenswrapper[4710]: E1009 09:05:24.028598 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.040507 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.050659 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.057270 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.058320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.058348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.058358 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.058370 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 
09:05:24.058379 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.065946 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.073070 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.080508 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.087919 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.095934 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.104096 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.112154 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.122991 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.129363 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.141068 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f29d16cb1b92a903092ffc6de47798a9cc4fece344c6622c0c6fce9dfe2d2be3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:07Z\\\",\\\"message\\\":\\\"mptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.138\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:07.576125 6067 ovn.go:134] Ensuring zone local for Pod openshift-dns/node-resolver-zzrnh in node crc\\\\nI1009 09:05:07.576127 6067 services_controller.go:452] Built service openshift-marketplace/redhat-operators per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576021 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1009 09:05:07.576134 6067 services_controller.go:453] Built service openshift-marketplace/redhat-operators template LB for network=default: []services.LB{}\\\\nI1009 09:05:07.576042 6067 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1009 09:05:07.576128 6067 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:07.576140 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:07Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/o
vn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.148764 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.156847 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.159949 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.159981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.159991 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.160003 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.160012 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.166505 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:24Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.261893 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.261927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.261937 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.261949 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.261957 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.363712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.363758 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.363769 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.363781 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.363788 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.465455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.465484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.465494 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.465506 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.465514 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.567084 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.567112 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.567120 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.567131 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.567139 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.669125 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.669160 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.669168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.669183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.669191 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.770641 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.770670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.770679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.770689 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.770696 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.814452 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.814493 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:24 crc kubenswrapper[4710]: E1009 09:05:24.814541 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.814630 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:24 crc kubenswrapper[4710]: E1009 09:05:24.814692 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:24 crc kubenswrapper[4710]: E1009 09:05:24.814732 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.814806 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:24 crc kubenswrapper[4710]: E1009 09:05:24.814909 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.872982 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.873136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.873195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.873282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.873352 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.974670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.974722 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.974732 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.974746 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:24 crc kubenswrapper[4710]: I1009 09:05:24.974755 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:24Z","lastTransitionTime":"2025-10-09T09:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.013842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.013891 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.013900 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.013912 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.013920 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.022163 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.024587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.024670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.024726 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.024777 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.024825 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.030976 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/2.log" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.033577 4710 scope.go:117] "RemoveContainer" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.033753 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.033760 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.035964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.035987 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.035995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.036007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.036015 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.041348 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.043483 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7
ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.045411 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.045518 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.045592 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.045648 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.045696 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.048949 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.053270 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.056384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.056422 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.056448 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.056461 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.056473 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.060825 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61
d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.065257 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: E1009 09:05:25.065365 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.068892 4710 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.076233 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.076344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.076456 4710 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.076544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.076601 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.077703 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\
"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.086500 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db
7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\
\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.101062 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.108366 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.116707 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.124146 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.130373 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.138829 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.146491 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.153844 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.160102 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.166696 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:25Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.177916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.177940 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.177949 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.177976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.177984 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.280065 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.280103 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.280115 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.280128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.280137 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.381612 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.381644 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.381670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.381682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.381691 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.483402 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.483447 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.483456 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.483465 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.483474 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.584881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.584917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.584927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.584942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.584950 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.686614 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.686654 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.686666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.686679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.686688 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.788793 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.788818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.788826 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.788836 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.788844 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.891059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.891088 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.891096 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.891106 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.891114 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.992887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.992914 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.992922 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.992934 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:25 crc kubenswrapper[4710]: I1009 09:05:25.992943 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:25Z","lastTransitionTime":"2025-10-09T09:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.094948 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.094975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.094983 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.094993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.095001 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.196804 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.196827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.196835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.196844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.196852 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.298144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.298172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.298180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.298190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.298199 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.400335 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.400371 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.400379 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.400391 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.400401 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.501812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.501842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.501851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.501862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.501871 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.506098 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.506173 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:05:58.506157786 +0000 UTC m=+81.996266183 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.603286 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.603319 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.603330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.603344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.603354 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.606587 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.606622 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.606655 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.606673 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606672 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606721 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606736 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606746 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606753 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606726 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:58.606718305 +0000 UTC m=+82.096826703 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606778 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606791 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:58.606777787 +0000 UTC m=+82.096886184 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606798 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606807 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:58.606801712 +0000 UTC m=+82.096910109 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606810 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.606844 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:58.606834013 +0000 UTC m=+82.096942420 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.705364 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.705386 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.705394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.705405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.705413 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.807405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.807449 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.807459 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.807470 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.807502 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.814914 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.814930 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.815008 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.815008 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.815168 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.815421 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.815469 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:26 crc kubenswrapper[4710]: E1009 09:05:26.815656 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.828515 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61
d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.835318 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.843136 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.851099 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.858824 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\
":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.884587 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\
"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2ee
ade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:0
4:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.897518 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.909441 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.909477 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.909486 4710 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.909498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.909508 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:26Z","lastTransitionTime":"2025-10-09T09:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.914392 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.923784 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.931471 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.938999 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.946299 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.954452 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.964082 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.974901 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:26 crc kubenswrapper[4710]: I1009 09:05:26.982379 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:26Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.010975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.010999 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.011006 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.011018 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.011027 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.112841 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.112869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.112877 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.112888 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.112896 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.214568 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.214671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.214736 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.214794 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.214847 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.316917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.317148 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.317157 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.317169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.317179 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.413629 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:27 crc kubenswrapper[4710]: E1009 09:05:27.413717 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:27 crc kubenswrapper[4710]: E1009 09:05:27.413760 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:05:43.413749116 +0000 UTC m=+66.903857512 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.418209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.418259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.418268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.418279 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.418287 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.519662 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.519692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.519700 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.519712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.519721 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.580478 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.587368 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.590070 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",
\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.599834 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd
9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c7
7bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.607832 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.615343 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.621591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.621636 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.621645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.621657 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.621666 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.623052 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.630187 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.636321 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.644561 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.652486 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.660475 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.667002 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.675163 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.684150 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.692159 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.703459 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\
\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acces
s-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service 
openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.713154 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:27Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.723599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.723701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.723767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.723821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.723875 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.825938 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.826054 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.826122 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.826183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.826262 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.927920 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.928046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.928123 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.928189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:27 crc kubenswrapper[4710]: I1009 09:05:27.928260 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:27Z","lastTransitionTime":"2025-10-09T09:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.029671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.029762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.029833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.029891 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.029956 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.131340 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.131424 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.131513 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.131580 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.131642 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.233531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.233668 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.233737 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.233821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.233893 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.335747 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.335789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.335798 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.335810 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.335818 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.437420 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.437460 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.437468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.437478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.437487 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.539504 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.539530 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.539538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.539549 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.539557 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.641513 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.641541 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.641549 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.641559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.641566 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.743295 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.743318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.743327 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.743336 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.743344 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.813869 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.813920 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.813982 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:28 crc kubenswrapper[4710]: E1009 09:05:28.814096 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.814252 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:28 crc kubenswrapper[4710]: E1009 09:05:28.814296 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:28 crc kubenswrapper[4710]: E1009 09:05:28.814337 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:28 crc kubenswrapper[4710]: E1009 09:05:28.814218 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.845364 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.845477 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.845552 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.845615 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.845673 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.947180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.947278 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.947337 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.947388 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:28 crc kubenswrapper[4710]: I1009 09:05:28.947473 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:28Z","lastTransitionTime":"2025-10-09T09:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.049355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.049386 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.049395 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.049407 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.049416 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.150978 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.151009 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.151017 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.151029 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.151038 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.252457 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.252632 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.252923 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.253018 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.253076 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.354483 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.354514 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.354530 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.354543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.354551 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.456365 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.456397 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.456405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.456417 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.456426 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.557977 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.558007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.558015 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.558026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.558035 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.659919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.659950 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.659962 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.659973 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.659982 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.761682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.761709 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.761718 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.761728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.761739 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.863253 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.863277 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.863286 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.863298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.863311 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.965378 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.965410 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.965419 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.965444 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:29 crc kubenswrapper[4710]: I1009 09:05:29.965453 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:29Z","lastTransitionTime":"2025-10-09T09:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.067143 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.067169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.067177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.067189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.067197 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.168608 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.168636 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.168644 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.168654 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.168661 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.270782 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.270818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.270827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.270841 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.270850 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.372881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.372909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.372917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.372928 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.372936 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.474349 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.474376 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.474386 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.474396 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.474403 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.575882 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.575913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.575921 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.575933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.575941 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.677632 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.677668 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.677677 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.677691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.677700 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.779105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.779152 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.779162 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.779173 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.779181 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.814382 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.814466 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:30 crc kubenswrapper[4710]: E1009 09:05:30.814509 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.814520 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.814529 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:30 crc kubenswrapper[4710]: E1009 09:05:30.814591 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:30 crc kubenswrapper[4710]: E1009 09:05:30.814708 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:30 crc kubenswrapper[4710]: E1009 09:05:30.814793 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.881542 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.881570 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.881577 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.881588 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.881595 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.983669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.983697 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.983705 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.983714 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:30 crc kubenswrapper[4710]: I1009 09:05:30.983724 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:30Z","lastTransitionTime":"2025-10-09T09:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.086961 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.086997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.087007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.087020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.087032 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.188944 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.189001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.189011 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.189024 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.189032 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.290927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.290960 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.290969 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.290981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.290990 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.392662 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.392695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.392705 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.392721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.392731 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.494200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.494244 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.494254 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.494268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.494276 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.595829 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.595854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.595861 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.595872 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.595880 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.697625 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.697646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.697653 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.697662 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.697669 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.799531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.799571 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.799581 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.799591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.799599 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.901082 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.901110 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.901118 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.901129 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:31 crc kubenswrapper[4710]: I1009 09:05:31.901136 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:31Z","lastTransitionTime":"2025-10-09T09:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.002351 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.002386 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.002394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.002406 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.002415 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.104503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.104536 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.104544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.104556 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.104565 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.206192 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.206250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.206262 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.206272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.206280 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.308259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.308297 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.308308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.308321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.308331 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.409812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.409843 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.409851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.409863 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.409870 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.511309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.511364 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.511373 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.511384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.511391 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.612652 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.612682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.612690 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.612702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.612710 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.714524 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.714552 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.714561 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.714573 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.714580 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.813956 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.814085 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.814005 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.813999 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:32 crc kubenswrapper[4710]: E1009 09:05:32.814289 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:32 crc kubenswrapper[4710]: E1009 09:05:32.814396 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:32 crc kubenswrapper[4710]: E1009 09:05:32.814544 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:32 crc kubenswrapper[4710]: E1009 09:05:32.814605 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.815510 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.815532 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.815541 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.815551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.815559 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.917390 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.917516 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.917583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.917610 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:32 crc kubenswrapper[4710]: I1009 09:05:32.917620 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:32Z","lastTransitionTime":"2025-10-09T09:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.019501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.019540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.019550 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.019564 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.019575 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.121872 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.121919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.121930 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.121941 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.121949 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.223702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.223743 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.223753 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.223767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.223775 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.325377 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.325411 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.325449 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.325468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.325476 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.427611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.427647 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.427656 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.427671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.427696 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.529059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.529096 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.529105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.529115 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.529130 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.630537 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.630574 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.630583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.630596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.630604 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.732572 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.732602 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.732610 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.732623 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.732633 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.833854 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.833896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.833905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.833916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.833924 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.935603 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.935627 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.935635 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.935645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:33 crc kubenswrapper[4710]: I1009 09:05:33.935653 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:33Z","lastTransitionTime":"2025-10-09T09:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.037583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.037607 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.037615 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.037636 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.037645 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.138951 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.138993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.139009 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.139023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.139035 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.241538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.241691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.241830 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.241917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.241990 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.344665 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.344852 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.344915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.344969 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.345017 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.446416 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.446456 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.446465 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.446476 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.446483 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.548599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.548627 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.548636 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.548645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.548652 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.650712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.650749 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.650758 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.650771 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.650781 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.752364 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.752510 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.752600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.752691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.752781 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.814190 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.814239 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:34 crc kubenswrapper[4710]: E1009 09:05:34.814289 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.814194 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.814309 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:34 crc kubenswrapper[4710]: E1009 09:05:34.814409 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:34 crc kubenswrapper[4710]: E1009 09:05:34.814502 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:34 crc kubenswrapper[4710]: E1009 09:05:34.814540 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.854889 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.854919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.854929 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.854939 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.854946 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.956679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.956712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.956720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.956732 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:34 crc kubenswrapper[4710]: I1009 09:05:34.956740 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:34Z","lastTransitionTime":"2025-10-09T09:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.058725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.058754 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.058762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.058773 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.058782 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.160792 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.160988 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.161075 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.161166 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.161257 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.263182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.263216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.263235 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.263247 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.263254 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.365072 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.365110 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.365120 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.365137 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.365146 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.423392 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.423427 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.423458 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.423471 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.423479 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.432776 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:35Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.435001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.435025 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.435034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.435044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.435051 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.442907 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:35Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.448278 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.448311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.448320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.448330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.448338 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.456109 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:35Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.458270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.458320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.458330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.458342 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.458351 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.466311 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:35Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.468461 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.468489 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.468498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.468508 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.468516 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.477306 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:35Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:35 crc kubenswrapper[4710]: E1009 09:05:35.477408 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.478485 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.478517 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.478544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.478555 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.478562 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.580156 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.580202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.580210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.580228 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.580237 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.681913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.681941 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.681970 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.681985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.681994 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.783254 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.783285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.783293 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.783307 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.783315 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.884734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.884763 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.884772 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.884784 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.884793 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.985936 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.985975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.985984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.985998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:35 crc kubenswrapper[4710]: I1009 09:05:35.986006 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:35Z","lastTransitionTime":"2025-10-09T09:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.087484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.087509 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.087517 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.087531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.087539 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.189318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.189348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.189357 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.189367 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.189376 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.291401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.291479 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.291491 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.291503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.291527 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.393074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.393126 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.393135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.393147 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.393156 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.494979 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.495011 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.495021 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.495040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.495050 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.597135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.597160 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.597168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.597180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.597188 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.698399 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.698446 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.698455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.698466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.698473 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.803198 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.803248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.803260 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.803272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.803281 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.813896 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.813914 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.813946 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:36 crc kubenswrapper[4710]: E1009 09:05:36.813995 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.814019 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:36 crc kubenswrapper[4710]: E1009 09:05:36.814100 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:36 crc kubenswrapper[4710]: E1009 09:05:36.814129 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:36 crc kubenswrapper[4710]: E1009 09:05:36.814233 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.824412 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.832648 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.838950 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.845672 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.854084 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.861196 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.870001 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.883236 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 
services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.890105 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.898205 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.905169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.905272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.905338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.905395 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.905478 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:36Z","lastTransitionTime":"2025-10-09T09:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.906754 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.916013 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.923698 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.930850 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.937475 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.943722 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:36 crc kubenswrapper[4710]: I1009 09:05:36.951475 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:36Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.006507 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.006531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.006538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.006548 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.006557 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.108255 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.108375 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.108466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.108540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.108597 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.210751 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.210780 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.210790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.210803 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.210812 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.313064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.313249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.313257 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.313268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.313277 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.414738 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.414894 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.414960 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.415014 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.415077 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.516571 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.516611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.516619 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.516632 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.516645 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.618148 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.618183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.618192 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.618203 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.618213 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.719649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.719678 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.719686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.719696 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.719704 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.815287 4710 scope.go:117] "RemoveContainer" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" Oct 09 09:05:37 crc kubenswrapper[4710]: E1009 09:05:37.815481 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.821438 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.821482 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.821491 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.821501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.821509 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.923141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.923167 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.923177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.923188 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:37 crc kubenswrapper[4710]: I1009 09:05:37.923195 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:37Z","lastTransitionTime":"2025-10-09T09:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.024649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.024681 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.024689 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.024701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.024710 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.126205 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.126237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.126245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.126256 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.126264 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.227701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.227745 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.227755 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.227765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.227773 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.328932 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.328960 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.328968 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.328980 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.328988 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.430770 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.430810 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.430820 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.430833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.430842 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.532707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.532737 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.532765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.532776 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.532784 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.634384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.634409 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.634416 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.634447 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.634457 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.736105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.736132 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.736140 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.736150 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.736158 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.814283 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.814329 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.814361 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:38 crc kubenswrapper[4710]: E1009 09:05:38.814380 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.814303 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:38 crc kubenswrapper[4710]: E1009 09:05:38.814503 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:38 crc kubenswrapper[4710]: E1009 09:05:38.814523 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:38 crc kubenswrapper[4710]: E1009 09:05:38.814571 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.838006 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.838026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.838034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.838042 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.838050 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.940301 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.940334 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.940342 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.940353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:38 crc kubenswrapper[4710]: I1009 09:05:38.940362 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:38Z","lastTransitionTime":"2025-10-09T09:05:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.042136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.042164 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.042172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.042182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.042189 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.143264 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.143290 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.143298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.143308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.143317 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.245211 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.245263 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.245272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.245283 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.245290 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.346730 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.346759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.346767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.346779 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.346787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.447790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.447848 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.447859 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.447870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.447882 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.549357 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.549382 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.549391 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.549401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.549409 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.650724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.651121 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.651190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.651267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.651325 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.752553 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.752582 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.752591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.752603 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.752612 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.821667 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.854789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.854814 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.854823 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.854835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.854844 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.956355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.956485 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.956563 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.956628 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:39 crc kubenswrapper[4710]: I1009 09:05:39.956694 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:39Z","lastTransitionTime":"2025-10-09T09:05:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.058466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.058669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.058745 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.058812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.058874 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.160378 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.160408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.160416 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.160445 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.160456 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.262200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.262242 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.262251 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.262263 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.262271 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.363946 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.363976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.363984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.363997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.364005 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.465478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.465595 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.465674 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.465753 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.465825 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.567505 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.567701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.567761 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.567824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.567881 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.670070 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.670109 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.670118 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.670131 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.670139 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.771776 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.771808 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.771816 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.771828 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.771836 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.814205 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.814234 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:40 crc kubenswrapper[4710]: E1009 09:05:40.814301 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.814207 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.814398 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:40 crc kubenswrapper[4710]: E1009 09:05:40.814560 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:40 crc kubenswrapper[4710]: E1009 09:05:40.814666 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:40 crc kubenswrapper[4710]: E1009 09:05:40.814795 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.873574 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.873601 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.873610 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.873620 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.873628 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.975455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.975586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.975645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.975708 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:40 crc kubenswrapper[4710]: I1009 09:05:40.975766 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:40Z","lastTransitionTime":"2025-10-09T09:05:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.076942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.076965 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.076973 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.076985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.076993 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.178661 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.178685 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.178694 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.178704 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.178712 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.280421 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.280478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.280488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.280500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.280510 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.382034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.382057 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.382064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.382074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.382082 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.483474 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.483507 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.483516 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.483531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.483539 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.584790 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.584823 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.584832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.584844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.584854 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.686318 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.686339 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.686348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.686360 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.686368 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.788287 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.788311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.788320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.788331 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.788338 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.890015 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.890039 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.890048 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.890068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.890077 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.992128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.992166 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.992174 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.992188 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:41 crc kubenswrapper[4710]: I1009 09:05:41.992197 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:41Z","lastTransitionTime":"2025-10-09T09:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.093943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.093977 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.093986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.093999 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.094009 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.195538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.195572 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.195583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.195596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.195605 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.297298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.297329 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.297338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.297350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.297358 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.399462 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.399488 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.399496 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.399507 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.399538 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.501072 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.501213 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.501237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.501247 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.501255 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.602670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.602715 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.602725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.602735 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.602744 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.704155 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.704184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.704195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.704206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.704214 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.806204 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.806252 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.806262 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.806276 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.806286 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.814623 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.814683 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.814711 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:42 crc kubenswrapper[4710]: E1009 09:05:42.814713 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.814692 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:42 crc kubenswrapper[4710]: E1009 09:05:42.814791 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:42 crc kubenswrapper[4710]: E1009 09:05:42.814840 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:42 crc kubenswrapper[4710]: E1009 09:05:42.814907 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.908136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.908292 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.908360 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.908447 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:42 crc kubenswrapper[4710]: I1009 09:05:42.908531 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:42Z","lastTransitionTime":"2025-10-09T09:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.010632 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.010656 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.010665 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.010675 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.010684 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.112473 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.112500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.112508 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.112519 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.112527 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.214149 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.214181 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.214189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.214202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.214212 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.315405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.315458 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.315467 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.315479 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.315489 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.416768 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.416798 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.416807 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.416818 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.416825 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.438359 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:43 crc kubenswrapper[4710]: E1009 09:05:43.438490 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:43 crc kubenswrapper[4710]: E1009 09:05:43.438537 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:15.438525177 +0000 UTC m=+98.928633574 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.518607 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.518655 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.518663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.518676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.518684 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.619978 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.620006 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.620014 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.620025 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.620032 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.721328 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.721359 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.721367 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.721396 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.721408 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.823163 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.823194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.823202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.823215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.823257 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.925137 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.925178 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.925189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.925201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:43 crc kubenswrapper[4710]: I1009 09:05:43.925208 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:43Z","lastTransitionTime":"2025-10-09T09:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.026798 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.026827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.026836 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.026849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.026875 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.128892 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.128926 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.128935 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.128948 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.128956 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.230699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.230721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.230728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.230736 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.230743 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.332651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.332673 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.332681 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.332690 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.332698 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.434578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.434600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.434608 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.434617 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.434624 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.536311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.536344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.536352 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.536404 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.536414 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.638096 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.638130 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.638139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.638152 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.638160 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.739630 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.739673 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.739682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.739692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.739700 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.814231 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.814277 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:44 crc kubenswrapper[4710]: E1009 09:05:44.814305 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.814279 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:44 crc kubenswrapper[4710]: E1009 09:05:44.814385 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.814398 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:44 crc kubenswrapper[4710]: E1009 09:05:44.814556 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:44 crc kubenswrapper[4710]: E1009 09:05:44.814620 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.841551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.841578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.841587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.841597 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.841607 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.943361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.943389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.943398 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.943408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:44 crc kubenswrapper[4710]: I1009 09:05:44.943417 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:44Z","lastTransitionTime":"2025-10-09T09:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.044800 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.044823 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.044832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.044842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.044850 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.146013 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.146040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.146050 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.146059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.146067 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.247945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.247971 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.247997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.248007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.248015 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.349906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.349938 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.349946 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.349957 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.349966 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.451359 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.451391 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.451400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.451412 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.451420 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.553212 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.553337 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.553401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.553500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.553565 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.656055 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.656179 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.656258 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.656332 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.656397 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.758403 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.758445 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.758454 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.758466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.758474 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.760891 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.760984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.761047 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.761112 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.761170 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.773275 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:45Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.776316 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.776355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.776365 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.776375 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.776385 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.785047 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:45Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.787577 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.787612 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.787622 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.787633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.787641 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.795841 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:45Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.798975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.799001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.799026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.799038 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.799051 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.811215 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:45Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.817970 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.818004 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.818018 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.818032 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.818041 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.826961 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:45Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:45 crc kubenswrapper[4710]: E1009 09:05:45.827068 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.859843 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.859870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.859878 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.859889 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.859897 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.961550 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.961567 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.961576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.961587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:45 crc kubenswrapper[4710]: I1009 09:05:45.961595 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:45Z","lastTransitionTime":"2025-10-09T09:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.063024 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.063050 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.063060 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.063072 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.063080 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.076929 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/0.log" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.076973 4710 generic.go:334] "Generic (PLEG): container finished" podID="421bdfde-a7ad-4e4c-aa0d-624104899b94" containerID="c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5" exitCode=1 Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.077003 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerDied","Data":"c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.077799 4710 scope.go:117] "RemoveContainer" containerID="c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.089186 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7
a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.098872 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.108373 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.116519 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.126572 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.135255 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.143378 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.150586 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.161272 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.164631 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.164657 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.164666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.164686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.164696 4710 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.169967 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.179202 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.189344 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.196041 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.203569 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.210857 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.218985 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.231533 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 
services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.238323 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.267151 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.267187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.267195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.267207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.267230 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.368820 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.368856 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.368866 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.368879 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.368892 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.470518 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.470551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.470560 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.470576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.470585 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.572152 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.572183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.572191 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.572203 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.572212 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.673669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.673702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.673710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.673725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.673734 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.775121 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.775524 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.775590 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.775651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.775709 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.814672 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.814706 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.814782 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:46 crc kubenswrapper[4710]: E1009 09:05:46.814768 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:46 crc kubenswrapper[4710]: E1009 09:05:46.814889 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:46 crc kubenswrapper[4710]: E1009 09:05:46.814949 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.814998 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:46 crc kubenswrapper[4710]: E1009 09:05:46.815043 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.823116 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.832074 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.842151 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.850719 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.858763 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.866102 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.873151 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.876695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.876720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.876730 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.876742 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.876750 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.884161 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.893748 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/op
enshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.904233 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.912420 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.919042 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.926155 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.933852 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.941405 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.949731 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.962453 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 
services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.969100 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:46Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.978255 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.978308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.978319 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.978331 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:46 crc kubenswrapper[4710]: I1009 09:05:46.978340 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:46Z","lastTransitionTime":"2025-10-09T09:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.079532 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.079558 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.079567 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.079578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.079587 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.080836 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/0.log" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.080880 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerStarted","Data":"5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.089711 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.098488 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.106184 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.113878 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.120092 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.131748 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.138648 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.146233 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.157006 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.165164 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.172483 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.180333 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.181304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.181332 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.181342 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.181353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.181362 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.192063 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.199731 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.208122 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.217824 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.228351 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.236608 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:47Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.283004 4710 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.283036 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.283045 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.283056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.283065 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.384868 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.384902 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.384912 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.384926 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.384936 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.486736 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.486775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.486784 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.486796 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.486805 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.588816 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.588845 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.588855 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.588869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.588879 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.690163 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.690196 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.690207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.690230 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.690241 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.792109 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.792138 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.792146 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.792157 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.792164 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.894618 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.894654 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.894664 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.894677 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.894686 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.996465 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.996952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.997086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.997195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:47 crc kubenswrapper[4710]: I1009 09:05:47.997296 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:47Z","lastTransitionTime":"2025-10-09T09:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.099948 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.100238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.100348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.100470 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.100533 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.202867 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.202904 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.202913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.202927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.202937 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.306248 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.306285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.306295 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.306308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.306320 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.407985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.408020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.408028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.408041 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.408050 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.509693 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.509730 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.509738 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.509752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.509762 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.611691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.611721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.611730 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.611744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.611753 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.713452 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.713484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.713508 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.713521 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.713530 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.814074 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.814074 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.814135 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:48 crc kubenswrapper[4710]: E1009 09:05:48.814232 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.814342 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:48 crc kubenswrapper[4710]: E1009 09:05:48.814374 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:48 crc kubenswrapper[4710]: E1009 09:05:48.814482 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:48 crc kubenswrapper[4710]: E1009 09:05:48.814512 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.815004 4710 scope.go:117] "RemoveContainer" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.816354 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.816419 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.816452 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.816478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.816493 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.919642 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.919664 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.919672 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.919684 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:48 crc kubenswrapper[4710]: I1009 09:05:48.919693 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:48Z","lastTransitionTime":"2025-10-09T09:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.021232 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.021256 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.021266 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.021278 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.021287 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.086934 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/2.log" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.089167 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.090019 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.111927 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.123270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.123291 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.123300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.123329 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.123339 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.129370 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.140743 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.150813 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.160867 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.170244 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.178772 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.185901 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.196487 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.205307 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.214755 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.223607 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.224749 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.224791 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.224806 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.224831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.224844 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.232533 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.243889 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.253343 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.263710 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.278364 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 
services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStat
uses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.287119 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.326760 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.326885 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.326954 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.327023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.327077 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.429771 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.429801 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.429809 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.429821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.429829 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.535394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.535467 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.535478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.535492 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.535502 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.636991 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.637022 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.637031 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.637044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.637053 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.740576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.740612 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.740620 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.740635 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.740645 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.842517 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.842545 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.842553 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.842566 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.842574 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.944412 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.944464 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.944473 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.944487 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:49 crc kubenswrapper[4710]: I1009 09:05:49.944498 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:49Z","lastTransitionTime":"2025-10-09T09:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.045691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.045715 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.045724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.045736 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.045744 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.091842 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/3.log" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.092311 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/2.log" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.093867 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" exitCode=1 Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.093899 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.093926 4710 scope.go:117] "RemoveContainer" containerID="2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.094456 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:05:50 crc kubenswrapper[4710]: E1009 09:05:50.094636 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.111200 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.120373 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.130094 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.138825 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.146892 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.148007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.148044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.148053 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.148068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.148078 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.153996 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.160420 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.165942 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.174608 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.183034 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.190206 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.197907 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.206261 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 
09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.215001 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.222556 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.231311 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.244507 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0
dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2814a7c4d46294b7ddad8c0dad16f9dae7d85e61d2a806d3e9dc9dc94a412e37\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:23Z\\\",\\\"message\\\":\\\" network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/cluster-autoscaler-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.245\\\\\\\", Port:9192, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1009 09:05:23.414336 6297 services_controller.go:452] Built service openshift-machine-api/cluster-autoscaler-operator per-node LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414337 6297 services_controller.go:360] Finished syncing service metrics on namespace openshift-network-operator for network=default : 549.295µs\\\\nI1009 09:05:23.414342 6297 services_controller.go:453] Built service openshift-machine-api/cluster-autoscaler-operator template LB for network=default: []services.LB{}\\\\nI1009 09:05:23.414344 6297 
services_controller.\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:49Z\\\",\\\"message\\\":\\\"0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 09:05:49.475469 6664 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:49.475389 6664 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1009 09:05:49.475474 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-l4vw4\\\\nI1009 09:05:49.475418 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1009 09:05:49.475479 6664 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1009 
09:05:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.250304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.250328 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.250338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.250350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.250358 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.252642 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:50Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.352114 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.352140 4710 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.352148 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.352159 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.352167 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.454484 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.454651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.454720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.454791 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.454856 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.556455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.556501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.556512 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.556529 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.556538 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.658010 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.658039 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.658047 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.658059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.658066 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.759875 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.759900 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.759909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.759919 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.759928 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.814685 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.814729 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.814772 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:50 crc kubenswrapper[4710]: E1009 09:05:50.814779 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:50 crc kubenswrapper[4710]: E1009 09:05:50.814845 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.814885 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:50 crc kubenswrapper[4710]: E1009 09:05:50.814917 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:50 crc kubenswrapper[4710]: E1009 09:05:50.814942 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.861599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.861632 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.861642 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.861655 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.861667 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.963692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.963725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.963734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.963746 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:50 crc kubenswrapper[4710]: I1009 09:05:50.963754 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:50Z","lastTransitionTime":"2025-10-09T09:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.065004 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.065044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.065055 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.065070 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.065080 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.097566 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/3.log" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.100169 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:05:51 crc kubenswrapper[4710]: E1009 09:05:51.100384 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.109954 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.117811 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.124382 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.131598 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:
10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.140743 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7
462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.148722 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.157687 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.166498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.166526 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.166536 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.166548 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.166557 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.170098 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a
51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:49Z\\\",\\\"message\\\":\\\"0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 09:05:49.475469 6664 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:49.475389 6664 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1009 09:05:49.475474 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-l4vw4\\\\nI1009 09:05:49.475418 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1009 09:05:49.475479 6664 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1009 09:05:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.177111 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.185680 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.195504 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness 
Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.205927 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.214196 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.222181 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.229961 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.236833 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.243015 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.250579 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:51Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.268751 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.268779 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.268788 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.268817 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.268826 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.371144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.371196 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.371206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.371228 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.371238 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.473549 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.473576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.473584 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.473595 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.473603 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.575215 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.575323 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.575400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.575491 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.575557 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.677175 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.677211 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.677232 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.677245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.677253 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.779070 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.779110 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.779120 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.779133 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.779142 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.880452 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.880770 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.880849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.880921 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.880982 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.982702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.982801 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.982866 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.982941 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:51 crc kubenswrapper[4710]: I1009 09:05:51.982998 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:51Z","lastTransitionTime":"2025-10-09T09:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.084408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.084458 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.084467 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.084478 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.084485 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.185907 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.185934 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.185943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.185952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.185961 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.287641 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.287669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.287678 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.287686 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.287693 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.389204 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.389250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.389259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.389272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.389283 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.490387 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.490423 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.490448 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.490462 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.490472 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.591904 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.591942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.591950 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.591961 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.591969 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.695176 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.695212 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.695235 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.695250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.695263 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.797279 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.797322 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.797333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.797349 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.797359 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.814585 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.814608 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:52 crc kubenswrapper[4710]: E1009 09:05:52.814695 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.814754 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:52 crc kubenswrapper[4710]: E1009 09:05:52.814845 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.814929 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:52 crc kubenswrapper[4710]: E1009 09:05:52.814994 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:52 crc kubenswrapper[4710]: E1009 09:05:52.815075 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.899145 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.899182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.899190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.899203 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:52 crc kubenswrapper[4710]: I1009 09:05:52.899212 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:52Z","lastTransitionTime":"2025-10-09T09:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.000958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.001008 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.001018 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.001029 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.001038 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.102971 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.103004 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.103016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.103029 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.103040 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.204638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.204666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.204674 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.204684 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.204692 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.306751 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.306781 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.306789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.306799 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.306806 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.408930 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.408964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.408974 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.408986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.408998 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.510933 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.510987 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.510995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.511009 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.511018 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.613021 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.613053 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.613062 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.613073 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.613088 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.715135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.715187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.715197 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.715207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.715216 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.817142 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.817165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.817172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.817182 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.817188 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.919453 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.919763 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.919833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.919894 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:53 crc kubenswrapper[4710]: I1009 09:05:53.919960 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:53Z","lastTransitionTime":"2025-10-09T09:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.022060 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.022094 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.022104 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.022116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.022124 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.123649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.123688 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.123702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.123716 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.123725 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.225207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.225250 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.225258 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.225271 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.225280 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.327043 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.327098 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.327109 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.327119 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.327127 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.428865 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.428893 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.428901 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.428913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.428920 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.530463 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.530500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.530510 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.530525 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.530535 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.632758 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.632797 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.632807 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.632821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.632830 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.733958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.733995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.734006 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.734020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.734030 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.814866 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.814888 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:54 crc kubenswrapper[4710]: E1009 09:05:54.814972 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.814993 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.815015 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:54 crc kubenswrapper[4710]: E1009 09:05:54.815086 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:54 crc kubenswrapper[4710]: E1009 09:05:54.815159 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:54 crc kubenswrapper[4710]: E1009 09:05:54.815233 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.835863 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.835890 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.835898 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.835910 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.835917 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.937468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.939606 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.939637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.939652 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:54 crc kubenswrapper[4710]: I1009 09:05:54.939667 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:54Z","lastTransitionTime":"2025-10-09T09:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.041664 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.041695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.041704 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.041716 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.041724 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.143596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.143629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.143638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.143649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.143658 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.245692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.245898 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.245907 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.245922 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.245930 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.347552 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.347657 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.347721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.347781 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.347836 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.449593 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.449699 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.449762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.449822 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.449890 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.551116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.551141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.551149 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.551158 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.551167 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.652229 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.652262 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.652270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.652282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.652291 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.754271 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.754300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.754310 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.754322 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.754332 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.855838 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.855896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.855906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.855915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.855923 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.900676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.900723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.900733 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.900745 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.900754 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.911500 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.913560 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.913589 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.913597 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.913609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.913618 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.921564 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.923814 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.923841 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.923849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.923859 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.923867 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.931503 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.933723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.933764 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.933774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.933784 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.933792 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.941906 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.944063 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.944110 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.944119 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.944128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.944134 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.951583 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:55Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:55 crc kubenswrapper[4710]: E1009 09:05:55.951682 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.957564 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.957589 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.957598 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.957607 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:55 crc kubenswrapper[4710]: I1009 09:05:55.957614 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:55Z","lastTransitionTime":"2025-10-09T09:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.059290 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.059327 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.059337 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.059348 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.059357 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.161300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.161323 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.161331 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.161384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.161399 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.263234 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.263263 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.263272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.263284 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.263292 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.365073 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.365120 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.365128 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.365141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.365149 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.466952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.466980 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.466987 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.467000 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.467008 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.568975 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.569005 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.569013 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.569026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.569034 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.670965 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.670994 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.671002 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.671012 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.671144 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.773164 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.773196 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.773205 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.773216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.773238 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.814243 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.814264 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:56 crc kubenswrapper[4710]: E1009 09:05:56.814338 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.814345 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:56 crc kubenswrapper[4710]: E1009 09:05:56.814487 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.814502 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:56 crc kubenswrapper[4710]: E1009 09:05:56.814578 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:56 crc kubenswrapper[4710]: E1009 09:05:56.814616 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.823623 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 
2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.833126 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.843474 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.851324 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.858828 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.866267 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.873285 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.874207 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.874236 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.874245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.874255 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.874263 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.885063 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.895895 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.905149 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 
09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.913056 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.920452 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.926620 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.938866 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:49Z\\\",\\\"message\\\":\\\"0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 09:05:49.475469 6664 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:49.475389 6664 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1009 09:05:49.475474 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-l4vw4\\\\nI1009 09:05:49.475418 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1009 09:05:49.475479 6664 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1009 09:05:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.945392 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.952935 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.960045 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.967668 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:56Z is after 2025-08-24T17:21:41Z" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.975988 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.976016 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.976024 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.976036 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:56 crc kubenswrapper[4710]: I1009 09:05:56.976045 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:56Z","lastTransitionTime":"2025-10-09T09:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.077717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.077740 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.077748 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.077759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.077768 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.179068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.179098 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.179108 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.179119 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.179127 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.280822 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.280856 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.280867 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.280879 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.280892 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.382549 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.382578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.382586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.382596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.382604 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.484034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.484087 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.484095 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.484105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.484113 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.585611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.585641 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.585650 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.585660 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.585688 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.687056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.687081 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.687089 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.687098 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.687106 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.788888 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.788909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.788917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.788926 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.788934 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.890046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.890068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.890076 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.890106 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.890114 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.991724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.991749 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.991757 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.991766 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:57 crc kubenswrapper[4710]: I1009 09:05:57.991773 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:57Z","lastTransitionTime":"2025-10-09T09:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.093414 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.093450 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.093459 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.093469 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.093476 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.195059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.195081 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.195090 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.195105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.195116 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.296947 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.297066 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.297132 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.297191 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.297267 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.398589 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.398627 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.398637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.398651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.398661 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.500027 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.500175 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.500327 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.500392 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.500473 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.569517 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.569680 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.569666736 +0000 UTC m=+146.059775133 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.602400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.602533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.602609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.602676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.602729 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.670848 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.670949 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.670980 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.671007 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671045 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671067 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671078 4710 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671074 4710 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671124 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.671111621 +0000 UTC m=+146.161220028 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671155 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.67114226 +0000 UTC m=+146.161250667 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671098 4710 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671217 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.671204687 +0000 UTC m=+146.161313094 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671342 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671401 4710 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671490 4710 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.671569 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.671559757 +0000 UTC m=+146.161668154 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.704917 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.704945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.704954 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.704967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.704975 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.806827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.806962 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.807031 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.807090 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.807150 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.814091 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.814244 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.814141 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.814441 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.814121 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.814608 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.814142 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:05:58 crc kubenswrapper[4710]: E1009 09:05:58.814828 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.908871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.908910 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.908923 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.908937 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:58 crc kubenswrapper[4710]: I1009 09:05:58.908948 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:58Z","lastTransitionTime":"2025-10-09T09:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.010466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.010844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.010921 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.010991 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.011050 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.112516 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.112547 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.112555 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.112566 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.112574 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.214008 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.214031 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.214040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.214050 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.214058 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.315498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.315671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.315757 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.315824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.315884 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.417245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.417362 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.417454 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.417523 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.417583 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.519097 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.519238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.519314 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.519388 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.519469 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.620844 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.620879 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.620888 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.620899 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.620907 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.722775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.723003 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.723080 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.723148 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.723213 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.825165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.825193 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.825261 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.825285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.825294 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.926849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.926892 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.926901 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.926911 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:05:59 crc kubenswrapper[4710]: I1009 09:05:59.926920 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:05:59Z","lastTransitionTime":"2025-10-09T09:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.027964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.027992 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.028000 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.028012 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.028021 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.129949 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.129997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.130015 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.130034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.130048 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.231558 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.231589 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.231598 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.231609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.231618 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.333320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.333343 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.333351 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.333361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.333370 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.434456 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.434490 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.434501 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.434513 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.434538 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.536023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.536181 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.536255 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.536320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.536382 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.638381 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.638407 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.638416 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.638427 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.638450 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.740787 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.740819 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.740827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.740839 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.740847 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.814278 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.814316 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.814323 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:00 crc kubenswrapper[4710]: E1009 09:06:00.814360 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.814445 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:00 crc kubenswrapper[4710]: E1009 09:06:00.814506 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:00 crc kubenswrapper[4710]: E1009 09:06:00.814612 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:00 crc kubenswrapper[4710]: E1009 09:06:00.814641 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.842569 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.842591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.842599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.842609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.842616 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.943586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.943618 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.943626 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.943638 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:00 crc kubenswrapper[4710]: I1009 09:06:00.943647 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:00Z","lastTransitionTime":"2025-10-09T09:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.045289 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.045316 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.045324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.045334 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.045342 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.146295 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.146323 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.146333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.146343 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.146350 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.248275 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.248311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.248321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.248330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.248337 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.350498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.350617 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.350672 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.350733 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.350787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.452871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.452896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.452903 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.452913 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.452920 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.555087 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.555115 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.555123 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.555135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.555143 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.657294 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.657325 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.657334 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.657345 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.657354 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.759565 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.759728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.759802 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.759864 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.759919 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.853414 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.861317 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.861346 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.861355 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.861366 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.861376 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.962961 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.962987 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.962995 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.963007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:01 crc kubenswrapper[4710]: I1009 09:06:01.963014 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:01Z","lastTransitionTime":"2025-10-09T09:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.064932 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.064964 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.064974 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.064986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.064994 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.166192 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.166230 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.166238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.166249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.166256 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.268558 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.268585 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.268593 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.268603 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.268611 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.370361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.370400 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.370410 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.370424 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.370455 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.471512 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.471537 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.471545 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.471557 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.471564 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.573108 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.573130 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.573139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.573149 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.573157 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.674861 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.674889 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.674897 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.674906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.674915 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.776967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.776999 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.777007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.777020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.777028 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.814845 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.814873 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:02 crc kubenswrapper[4710]: E1009 09:06:02.814934 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.814849 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.814992 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:02 crc kubenswrapper[4710]: E1009 09:06:02.815059 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:02 crc kubenswrapper[4710]: E1009 09:06:02.815113 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:02 crc kubenswrapper[4710]: E1009 09:06:02.815266 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.878915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.878935 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.878942 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.878952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.878960 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.980350 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.980509 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.980577 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.980649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:02 crc kubenswrapper[4710]: I1009 09:06:02.980716 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:02Z","lastTransitionTime":"2025-10-09T09:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.082150 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.082172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.082179 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.082188 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.082195 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.184234 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.184260 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.184269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.184278 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.184285 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.285906 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.286074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.286165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.286235 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.286303 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.388136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.388165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.388175 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.388186 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.388195 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.489707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.489815 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.489885 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.489940 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.489994 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.591882 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.592049 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.592202 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.592360 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.592507 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.695827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.695862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.695870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.695882 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.695891 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.797696 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.797726 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.797734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.797747 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.797756 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.899795 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.899824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.899832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.899843 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:03 crc kubenswrapper[4710]: I1009 09:06:03.899851 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:03Z","lastTransitionTime":"2025-10-09T09:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.001454 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.001532 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.001540 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.001550 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.001558 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.103116 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.103466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.103543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.103605 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.103665 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.205303 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.205342 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.205351 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.205365 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.205373 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.307580 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.307630 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.307640 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.307651 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.307659 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.409589 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.409622 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.409630 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.409643 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.409652 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.511859 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.511893 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.511904 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.511915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.511924 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.613676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.613714 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.613722 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.613734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.613742 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.715800 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.715842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.715852 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.715864 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.715874 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.814732 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.814760 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.814784 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.814748 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:04 crc kubenswrapper[4710]: E1009 09:06:04.815110 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:04 crc kubenswrapper[4710]: E1009 09:06:04.815161 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:04 crc kubenswrapper[4710]: E1009 09:06:04.815217 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:04 crc kubenswrapper[4710]: E1009 09:06:04.815303 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.815361 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:06:04 crc kubenswrapper[4710]: E1009 09:06:04.815499 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.817243 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.817268 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.817277 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.817319 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.817329 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.919086 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.919245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.919309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.919390 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:04 crc kubenswrapper[4710]: I1009 09:06:04.919470 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:04Z","lastTransitionTime":"2025-10-09T09:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.020857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.020886 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.020896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.020905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.020913 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.122660 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.122687 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.122697 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.122707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.122714 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.224259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.224285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.224294 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.224304 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.224312 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.326156 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.326176 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.326184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.326194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.326202 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.427902 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.427924 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.427932 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.427940 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.427947 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.529771 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.529804 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.529813 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.529836 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.529846 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.631336 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.631368 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.631379 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.631392 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.631400 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.733037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.733065 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.733074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.733082 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.733090 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.834918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.834948 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.834956 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.834966 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.834974 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.936824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.936849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.936857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.936868 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:05 crc kubenswrapper[4710]: I1009 09:06:05.936875 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:05Z","lastTransitionTime":"2025-10-09T09:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.038160 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.038183 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.038192 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.038201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.038209 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.139619 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.139653 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.139663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.139675 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.139684 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.241155 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.241301 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.241405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.241498 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.241573 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.279645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.279749 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.279805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.279986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.280047 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.288421 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.290789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.290816 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.290825 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.290835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.290843 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.299098 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.301035 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.301056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.301064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.301075 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.301085 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.308698 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.310624 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.310647 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.310654 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.310663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.310670 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.318684 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.321522 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.321553 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.321564 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.321576 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.321589 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.330068 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.330169 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.343165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.343267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.343330 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.343388 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.343477 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.445382 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.445407 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.445446 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.445457 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.445465 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.547133 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.547161 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.547170 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.547180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.547187 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.648954 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.648978 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.648985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.648994 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.649001 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.750282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.750315 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.750324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.750335 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.750343 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.814258 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.814258 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.814309 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.814310 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.814472 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.814527 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.814599 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:06 crc kubenswrapper[4710]: E1009 09:06:06.814656 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.824772 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.831936 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.844514 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5856af22-9535-479e-8bde-0c1ba9a699a8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33e9742ccbade317b6ab495d6fec629058fba4bc6cbc22b8fb403eca0d9b1f12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://029c6476ce646f47c6104164ce2bffe8ac68b2b65c594a25308345f8befdcfef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f35c2c8069414d1bbbf0bfe4e7020dbfa78d024ec43161db9a06fccdc23b6e55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38153b764b0e0a4be98293a91d411c7a4e036de
3963c35d426e02352568fecd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65b3efceddeaee218d95f885a5ae7a54983cc0746bcb1674484fd9089d21c98a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.851862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.851895 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.851905 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.851914 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.851922 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.853098 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.865563 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:49Z\\\",\\\"message\\\":\\\"0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 09:05:49.475469 6664 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:49.475389 6664 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1009 09:05:49.475474 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-l4vw4\\\\nI1009 09:05:49.475418 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1009 09:05:49.475479 6664 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1009 09:05:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.871999 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.881291 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.890677 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.899675 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.907755 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.915268 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.922285 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.929048 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.935177 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.943786 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.951751 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.953679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.953720 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.953730 4710 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.953744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.953752 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:06Z","lastTransitionTime":"2025-10-09T09:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.959558 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.966254 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:06 crc kubenswrapper[4710]: I1009 09:06:06.973283 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:
10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:06Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.055264 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.055286 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.055294 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.055321 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.055330 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.156848 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.157119 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.157201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.157276 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.157330 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.258652 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.258768 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.258838 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.258907 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.258967 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.360574 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.360605 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.360613 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.360625 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.360636 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.462752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.462780 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.462787 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.462799 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.462808 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.564891 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.564918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.564927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.564937 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.564946 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.666416 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.666480 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.666489 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.666500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.666509 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.768448 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.768506 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.768517 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.768528 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.768535 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.870547 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.870579 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.870587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.870596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.870604 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.972730 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.972758 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.972766 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.972781 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:07 crc kubenswrapper[4710]: I1009 09:06:07.972789 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:07Z","lastTransitionTime":"2025-10-09T09:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.074452 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.074482 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.074491 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.074504 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.074513 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.175774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.176059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.176136 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.176200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.176276 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.277767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.277789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.277796 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.277805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.277813 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.379423 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.379470 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.379479 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.379490 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.379499 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.480713 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.480744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.480753 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.480765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.480773 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.581930 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.581958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.581966 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.581976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.581983 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.683785 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.683812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.683820 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.683831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.683839 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.784997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.785105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.785165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.785237 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.785299 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.814484 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.814531 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:08 crc kubenswrapper[4710]: E1009 09:06:08.814564 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.814596 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:08 crc kubenswrapper[4710]: E1009 09:06:08.814652 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:08 crc kubenswrapper[4710]: E1009 09:06:08.814703 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.814723 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:08 crc kubenswrapper[4710]: E1009 09:06:08.814799 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.886862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.886967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.887034 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.887105 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.887173 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.988644 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.988761 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.988826 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.988896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:08 crc kubenswrapper[4710]: I1009 09:06:08.988952 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:08Z","lastTransitionTime":"2025-10-09T09:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.091189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.091230 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.091241 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.091254 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.091264 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.192464 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.192494 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.192504 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.192515 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.192523 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.293791 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.293824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.293832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.293851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.293860 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.395463 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.395495 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.395503 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.395514 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.395523 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.496932 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.496962 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.496972 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.496984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.496991 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.598586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.598613 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.598620 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.598630 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.598638 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.700123 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.700153 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.700162 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.700171 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.700180 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.802172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.802206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.802218 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.802241 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.802251 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.904028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.904053 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.904061 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.904070 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:09 crc kubenswrapper[4710]: I1009 09:06:09.904078 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:09Z","lastTransitionTime":"2025-10-09T09:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.005945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.005981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.005990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.006005 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.006016 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.107671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.107704 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.107752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.107778 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.107787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.209819 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.209851 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.209860 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.209871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.209883 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.311849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.311879 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.311887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.311899 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.311923 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.413559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.413582 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.413591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.413604 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.413612 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.515547 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.515582 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.515590 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.515601 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.515609 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.617038 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.617071 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.617080 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.617093 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.617104 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.719017 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.719046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.719056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.719068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.719078 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.814242 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.814260 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.814296 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:10 crc kubenswrapper[4710]: E1009 09:06:10.814337 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.814408 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:10 crc kubenswrapper[4710]: E1009 09:06:10.814405 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:10 crc kubenswrapper[4710]: E1009 09:06:10.814538 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:10 crc kubenswrapper[4710]: E1009 09:06:10.814573 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.820754 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.820779 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.820789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.820800 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.820808 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.922655 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.922752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.922760 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.922770 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:10 crc kubenswrapper[4710]: I1009 09:06:10.922778 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:10Z","lastTransitionTime":"2025-10-09T09:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.024135 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.024161 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.024169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.024178 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.024186 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.125734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.125760 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.125768 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.125778 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.125786 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.227869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.227893 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.227901 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.227915 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.227923 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.330127 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.330185 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.330194 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.330205 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.330214 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.431998 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.432023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.432030 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.432040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.432061 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.533296 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.533316 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.533324 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.533333 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.533340 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.635357 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.635388 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.635395 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.635405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.635411 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.737911 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.737935 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.737943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.737952 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.737960 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.839712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.839733 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.839741 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.839756 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.839763 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.942068 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.942190 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.942260 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.942342 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:11 crc kubenswrapper[4710]: I1009 09:06:11.942398 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:11Z","lastTransitionTime":"2025-10-09T09:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.043733 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.043860 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.043927 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.043985 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.044048 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.145399 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.145425 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.145447 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.145456 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.145464 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.246866 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.247184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.247270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.247337 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.247388 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.349903 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.349929 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.349938 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.349947 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.349955 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.451814 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.451839 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.451847 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.451856 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.451869 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.553599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.553621 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.553629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.553637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.553645 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.655559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.655583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.655590 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.655599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.655606 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.757353 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.757408 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.757419 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.757449 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.757459 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.814028 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.814169 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.814203 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:12 crc kubenswrapper[4710]: E1009 09:06:12.814267 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.814463 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:12 crc kubenswrapper[4710]: E1009 09:06:12.814515 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:12 crc kubenswrapper[4710]: E1009 09:06:12.814568 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:12 crc kubenswrapper[4710]: E1009 09:06:12.814614 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.859095 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.859122 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.859132 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.859144 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.859152 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.960168 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.960201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.960211 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.960238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:12 crc kubenswrapper[4710]: I1009 09:06:12.960249 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:12Z","lastTransitionTime":"2025-10-09T09:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.061563 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.061587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.061600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.061611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.061621 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.163463 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.163571 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.163629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.163697 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.163748 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.265208 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.265249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.265259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.265269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.265276 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.367646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.367676 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.367700 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.367710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.367717 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.468923 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.468945 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.468953 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.468962 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.468969 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.570666 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.570692 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.570700 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.570710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.570717 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.672360 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.672383 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.672392 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.672401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.672410 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.773785 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.773805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.773812 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.773821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.773828 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.875725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.875753 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.875762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.875772 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.875780 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.977468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.977493 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.977500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.977510 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:13 crc kubenswrapper[4710]: I1009 09:06:13.977517 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:13Z","lastTransitionTime":"2025-10-09T09:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.079039 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.079060 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.079067 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.079076 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.079083 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.180834 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.180861 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.180870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.180881 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.180890 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.282507 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.282527 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.282535 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.282543 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.282550 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.384096 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.384121 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.384129 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.384139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.384145 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.485374 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.485394 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.485401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.485409 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.485416 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.586794 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.586814 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.586822 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.586831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.586838 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.688890 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.688922 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.688931 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.688943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.688955 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.790344 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.790524 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.790600 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.790671 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.790723 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.814709 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.814757 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:14 crc kubenswrapper[4710]: E1009 09:06:14.814826 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.814833 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:14 crc kubenswrapper[4710]: E1009 09:06:14.814871 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:14 crc kubenswrapper[4710]: E1009 09:06:14.814919 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.814963 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:14 crc kubenswrapper[4710]: E1009 09:06:14.815067 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.892689 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.892716 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.892724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.892734 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.892741 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.994195 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.994219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.994236 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.994245 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:14 crc kubenswrapper[4710]: I1009 09:06:14.994252 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:14Z","lastTransitionTime":"2025-10-09T09:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.096184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.096200 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.096208 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.096216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.096235 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.197809 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.197833 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.197840 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.197849 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.197856 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.299537 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.299559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.299567 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.299583 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.299592 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.401653 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.401691 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.401701 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.401713 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.401721 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.495482 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:15 crc kubenswrapper[4710]: E1009 09:06:15.495566 4710 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:06:15 crc kubenswrapper[4710]: E1009 09:06:15.495613 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs podName:b237d61d-3d37-4b76-afa3-d5fe7119b0b6 nodeName:}" failed. No retries permitted until 2025-10-09 09:07:19.495600575 +0000 UTC m=+162.985708972 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs") pod "network-metrics-daemon-p9sh6" (UID: "b237d61d-3d37-4b76-afa3-d5fe7119b0b6") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.503425 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.503486 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.503500 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.503521 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.503539 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.605040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.605064 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.605074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.605083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.605089 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.706175 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.706201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.706209 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.706219 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.706236 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.807630 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.807775 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.807870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.807944 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.808017 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.909401 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.909445 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.909454 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.909465 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:15 crc kubenswrapper[4710]: I1009 09:06:15.909472 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:15Z","lastTransitionTime":"2025-10-09T09:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.011249 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.011270 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.011278 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.011287 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.011296 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.113553 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.113587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.113596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.113609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.113618 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.214948 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.214976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.214984 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.214994 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.215003 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.316825 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.316869 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.316878 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.316887 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.316895 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.402591 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.402625 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.402633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.402645 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.402654 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.411393 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.413909 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.413936 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.413944 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.413955 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.413963 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.421904 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.424236 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.424264 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.424272 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.424282 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.424289 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.431748 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.433724 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.433757 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.433766 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.433778 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.433787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.441131 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.443150 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.443171 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.443180 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.443189 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.443198 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.450659 4710 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-09T09:06:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"22e9dfd4-7a0f-4a29-94b5-6d66f0b46200\\\",\\\"systemUUID\\\":\\\"7ac7f3a6-4575-4e30-8696-d94e384307e4\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.450764 4710 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.451660 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.451682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.451690 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.451698 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.451707 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.552954 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.552988 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.553028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.553043 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.553053 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.654762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.654797 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.654805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.654815 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.654823 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.757314 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.757338 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.757346 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.757356 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.757364 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.814366 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.814478 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.814623 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.814662 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.814688 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.814744 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.814776 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.815061 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.815214 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:06:16 crc kubenswrapper[4710]: E1009 09:06:16.815340 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.822300 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2651d0d9-69b6-43e3-ac07-2b282edaf457\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb3a6a6b28c64ba39e628cd4f0ca4cdd07fecb98e9b5a5598b9bbc06d90ec825\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\
\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a3613a4e5d2245727f06ede3f9973d27bdc4f1e4143ded9ce439003c695a1a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.830814 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-5c9mg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"421bdfde-a7ad-4e4c-aa0d-624104899b94\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:45Z\\\",\\\"message\\\":\\\"2025-10-09T09:05:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c\\\\n2025-10-09T09:05:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_27d3f5c3-bae8-4611-9873-21e4a112b64c to /host/opt/cni/bin/\\\\n2025-10-09T09:05:00Z [verbose] multus-daemon started\\\\n2025-10-09T09:05:00Z [verbose] Readiness Indicator file check\\\\n2025-10-09T09:05:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qvjrc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-5c9mg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.840637 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b134fc1d-9d0f-4ebf-a188-92bb8bfdd014\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5cf6859cc189d5604cfd63d330c9bd702e2acac18c63e6e7009953c5726d8bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85939fd9dc967364ddbaf5bf1f0edfd7724ce30f32d654c82341d74dad75941\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ded8c78784ba9122892c5b5cb213c129b5aeb0b8a43d876e8e2842c508e1fab4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e9af4f8b78f827ff2d3e634236cdbf666916d6fe52a641cd2a1151a34429a6e3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://974de1c9c09a96cfb3d5d61a91fb34db9676ece85f3c919833233c77bf60401c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://763fec4854c6594c9b4d6ffea06a1e27c0ac8a1aa858107b5b1fb77dce773d9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d2e21f00fd567fbea4893884f4101b37aaaa53b6121025e5787765da190db6d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:05:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2tfpk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-l4vw4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.848481 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.857493 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.858789 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.858817 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.858827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.858862 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.858871 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.864634 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a875ef387c9ffece76680e503913ad51b44f5429787ca41c459b026d3e5120bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.871656 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f676b5cb-d273-4cac-85de-23ca7b6151b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f7b4d96125b8f79f51c6c77c201669ffb2ae94243a0dea7f69af7ce640c4959e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lpqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-fzkfm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.878288 4710 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hrfgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"067eda20-53ab-400e-abb3-eb6184f0f60d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10020f4cd742a5494e105f510b71cf2294c0a7c7733307f23b26f3c04a6465b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sfnl7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hrfgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.888193 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0dc4849d-4a57-4312-8413-3764bc0be0b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2a505484c7f40d7a22a489b922c4361e663dd4b37df86a561bd78eea760dcb0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://10a9029f209de49e64f27e7762e6bdacbd25cd8d2bccb6ee4abad7658ee607d5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d32b32518913041cebf14fa924b8373103f745078d7b3486c21d1f3c47fc63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9d1d99c8972707482e013edef81e009b9f5e718769aa4f60525f90f18458e16\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06ee3df709cfc1f3bdfdb9fde6fd1be07cd51760de5cbcb3dc7b58081ec191b2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1009 09:04:48.872781 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1009 09:04:48.874596 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3989697959/tls.crt::/tmp/serving-cert-3989697959/tls.key\\\\\\\"\\\\nI1009 09:04:54.298806 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1009 09:04:54.302662 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1009 09:04:54.302682 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1009 09:04:54.302699 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1009 09:04:54.302703 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1009 09:04:54.306413 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1009 09:04:54.306546 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306576 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1009 09:04:54.306595 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1009 09:04:54.306612 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1009 09:04:54.306628 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1009 09:04:54.306644 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1009 09:04:54.306454 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1009 09:04:54.308058 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad349e0d48a38c507dcb1c768798d416a612219858b1ec5d17eb1b6477b58828\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d37349874dd148054b4bd751c3954c847c5137a7e701295e474bfcbd44e6435\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.896058 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f06facc32fe07725c95a8fae07be6b5be9fa6c91d698c3729f845d6c46caa392\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e4b781cb9d7bc78e2bb6ab13cccdeb9b68425b6c6efafadc1ff7bd0e42fe390\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.903550 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.909821 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-zzrnh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"934ad2b3-3174-4135-be38-73a7f4dd6c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06087d95d69e7d97e697c2b19a634be508996de12371fcd05fdd74118d7a3e7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m6m7g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-zzrnh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.917212 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaca9555-f8a0-49e8-a266-5f2700cbf9e5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://431d9382a775d61fe64040d291ea0b11e4630c662a1e667161e3de6cab041027\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3daa27b2a2509659f618bb8072b3ed3e4897519b9fc45b0ff5773475d34fba1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-459hs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-b9p6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 
09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.925165 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"24326c29-1900-464d-b595-a686f669597e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c7888f2b18f0504ef93db6165499b06500b23b90bb32d2b916cfc6917a19a668\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://32b1fa9168e41f8b79d4030ece364a781d10deca4489c6f26417c54ed8410654\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://384b4c84649e76dc24c15119703bb04535fa37945c9438b031dce0d9ff522934\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51d334a7f58be224dcbf2dd13e31b9021859a5608fc0a514260be1d52d4b3513\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.932337 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1eee34f9-2622-40a1-a8b4-e8543d642fad\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5fc8f2eb3fc14287e7a31ee0bf390c36a0744a9d95e7924eee729f4df93089d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9898c2889f9a6045a8fe7b25dd8abbc4da70b547b547031167ef613827ca5922\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6fcd9e68107e1bf6c79d4d74d2069de37bcd60eed181f032371d47463ea6cf90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://208b302ef86806904caaf46782331845b3c0c67376a4f4747b3757a4899582e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.944216 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5856af22-9535-479e-8bde-0c1ba9a699a8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33e9742ccbade317b6ab495d6fec629058fba4bc6cbc22b8fb403eca0d9b1f12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://029c6476ce646f47c6104164ce2bffe8ac68b2b65c594a25308345f8befdcfef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f35c2c8069414d1bbbf0bfe4e7020dbfa78d024ec43161db9a06fccdc23b6e55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://38153b764b0e0a4be98293a91d411c7a4e036de3963c35d426e02352568fecd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65b3efceddeaee218d95f885a5ae7a54983cc0746bcb1674484fd9089d21c98a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://15d0b78afb2dd6356645d2d3ab457694e493abbdb148dabd9b882edfac4b16a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://307b09154fb85493f316ce8cc526df61f6043752a676b53428edafcbb5a9df21\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1babaa7a26416781a494eb627f2bb129d613ee5707d5ce69b9bfc570980640ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:36Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.953027 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://35d22725c9000a8fa139efde62dffa58ff86727bde49e6aa877faa0ffbd8c496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.960971 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.960993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.961001 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.961012 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.961021 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:16Z","lastTransitionTime":"2025-10-09T09:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.964545 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0aae2f40-061f-4e34-abaa-11bafcd40ef6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:04:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-09T09:05:49Z\\\",\\\"message\\\":\\\"0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:05:49Z is after 2025-08-24T17:21:41Z]\\\\nI1009 09:05:49.475469 6664 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1009 09:05:49.475389 6664 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1009 09:05:49.475474 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/multus-additional-cni-plugins-l4vw4\\\\nI1009 09:05:49.475418 6664 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/kube-rbac-proxy-crio-crc\\\\nI1009 09:05:49.475479 6664 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1009 09:05:4\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-09T09:05:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-mxql9_openshift-ovn-kubernetes(0aae2f40-061f-4e34-abaa-11bafcd40ef6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-09T09:05:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-09T09:04:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-09T09:04:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5vlxv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:04:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-mxql9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:16 crc kubenswrapper[4710]: I1009 09:06:16.970647 4710 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-09T09:05:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-x2bc2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-09T09:05:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-p9sh6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-09T09:06:16Z is after 2025-08-24T17:21:41Z" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.063531 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.063559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.063586 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.063596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.063604 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.164794 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.164824 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.164832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.164845 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.164854 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.266594 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.266618 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.266626 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.266637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.266645 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.368280 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.368308 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.368316 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.368327 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.368336 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.469801 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.469830 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.469838 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.469850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.469857 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.571551 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.571578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.571587 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.571598 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.571607 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.673112 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.673165 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.673176 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.673186 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.673194 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.774609 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.774637 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.774646 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.774657 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.774666 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.876336 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.876361 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.876369 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.876379 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.876386 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.978216 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.978252 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.978260 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.978269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:17 crc kubenswrapper[4710]: I1009 09:06:17.978277 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:17Z","lastTransitionTime":"2025-10-09T09:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.079578 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.079605 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.079695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.079847 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.079856 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.181679 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.181710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.181721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.181732 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.181741 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.283669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.283721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.283731 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.283744 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.283754 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.385827 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.385861 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.385871 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.385883 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.385893 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.487523 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.487546 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.487553 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.487562 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.487583 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.589274 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.589300 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.589309 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.589320 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.589328 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.690832 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.690850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.690857 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.690865 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.690873 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.792446 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.792467 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.792476 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.792485 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.792492 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.814925 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.814972 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.814972 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.814993 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:18 crc kubenswrapper[4710]: E1009 09:06:18.815083 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:18 crc kubenswrapper[4710]: E1009 09:06:18.815206 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:18 crc kubenswrapper[4710]: E1009 09:06:18.815303 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:18 crc kubenswrapper[4710]: E1009 09:06:18.815341 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.894028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.894059 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.894067 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.894077 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.894086 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.995718 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.995761 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.995771 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.995784 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:18 crc kubenswrapper[4710]: I1009 09:06:18.995794 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:18Z","lastTransitionTime":"2025-10-09T09:06:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.097509 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.097538 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.097548 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.097559 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.097568 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.198791 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.198816 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.198825 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.198835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.198842 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.300269 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.300301 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.300311 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.300325 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.300334 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.402174 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.402236 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.402246 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.402259 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.402268 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.504396 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.504426 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.504456 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.504470 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.504480 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.606285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.606323 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.606331 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.606341 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.606348 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.708206 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.708242 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.708251 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.708260 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.708267 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.810598 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.810633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.810641 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.810650 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.810657 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.912633 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.912738 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.912765 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.912778 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:19 crc kubenswrapper[4710]: I1009 09:06:19.912787 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:19Z","lastTransitionTime":"2025-10-09T09:06:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.014020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.014048 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.014057 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.014069 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.014077 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.114992 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.115036 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.115044 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.115056 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.115064 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.216626 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.216655 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.216665 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.216677 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.216685 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.318655 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.318682 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.318690 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.318702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.318713 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.420042 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.420066 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.420074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.420083 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.420091 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.521032 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.521057 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.521065 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.521074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.521080 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.622750 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.622783 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.622792 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.622804 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.622812 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.724678 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.724702 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.724710 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.724721 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.724728 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.814865 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.814915 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.814891 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.814869 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:20 crc kubenswrapper[4710]: E1009 09:06:20.814980 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:20 crc kubenswrapper[4710]: E1009 09:06:20.815070 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:20 crc kubenswrapper[4710]: E1009 09:06:20.815126 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:20 crc kubenswrapper[4710]: E1009 09:06:20.815165 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.825663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.825690 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.825698 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.825708 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.825732 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.927773 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.927797 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.927805 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.927815 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:20 crc kubenswrapper[4710]: I1009 09:06:20.927821 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:20Z","lastTransitionTime":"2025-10-09T09:06:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.029476 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.029502 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.029527 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.029539 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.029546 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.130796 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.130850 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.130859 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.130868 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.130875 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.232733 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.232756 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.232764 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.232773 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.232780 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.334000 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.334023 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.334030 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.334039 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.334046 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.435810 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.435835 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.435843 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.435852 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.435860 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.537368 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.537533 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.537599 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.537669 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.537726 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.639238 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.639451 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.639544 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.639611 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.639822 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.742029 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.742193 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.742274 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.742340 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.742401 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.844390 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.844423 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.844455 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.844468 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.844476 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.946220 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.946264 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.946273 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.946285 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:21 crc kubenswrapper[4710]: I1009 09:06:21.946292 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:21Z","lastTransitionTime":"2025-10-09T09:06:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.048963 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.048986 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.048997 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.049007 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.049015 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.151020 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.151074 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.151082 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.151093 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.151101 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.253004 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.253030 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.253037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.253046 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.253054 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.355014 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.355042 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.355052 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.355062 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.355070 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.456663 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.456695 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.456705 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.456716 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.456724 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.558407 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.558462 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.558474 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.558487 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.558496 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.660155 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.660179 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.660188 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.660198 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.660206 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.762101 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.762143 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.762154 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.762172 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.762182 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.814935 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.814969 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:22 crc kubenswrapper[4710]: E1009 09:06:22.815047 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.815067 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.815083 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:22 crc kubenswrapper[4710]: E1009 09:06:22.815140 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:22 crc kubenswrapper[4710]: E1009 09:06:22.815188 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:22 crc kubenswrapper[4710]: E1009 09:06:22.815243 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.863924 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.863958 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.863967 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.863976 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.863984 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.966026 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.966055 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.966067 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.966077 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:22 crc kubenswrapper[4710]: I1009 09:06:22.966084 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:22Z","lastTransitionTime":"2025-10-09T09:06:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.067767 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.067794 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.067802 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.067813 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.067823 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.169485 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.169506 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.169514 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.169523 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.169531 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.271054 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.271075 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.271127 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.271139 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.271147 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.372837 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.372875 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.372884 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.372899 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.372908 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.474754 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.474793 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.474802 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.474811 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.474818 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.579384 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.579412 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.579420 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.579457 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.579466 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.680629 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.680711 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.680719 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.680728 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.680736 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.781759 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.781821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.781831 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.781848 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.781856 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.883173 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.883193 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.883201 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.883210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.883217 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.984596 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.984625 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.984634 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.984643 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:23 crc kubenswrapper[4710]: I1009 09:06:23.984650 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:23Z","lastTransitionTime":"2025-10-09T09:06:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.085974 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.086028 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.086037 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.086050 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.086059 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.187943 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.187973 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.187981 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.187993 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.188000 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.289689 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.289707 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.289714 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.289723 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.289731 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.391556 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.391577 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.391585 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.391594 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.391602 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.493842 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.493870 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.493878 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.493888 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.493912 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.595863 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.595895 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.595903 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.595918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.595926 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.697187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.697210 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.697217 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.697251 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.697259 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.799389 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.799423 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.799449 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.799460 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.799468 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.814785 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.814802 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.814853 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.814897 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:24 crc kubenswrapper[4710]: E1009 09:06:24.814959 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:24 crc kubenswrapper[4710]: E1009 09:06:24.815184 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:24 crc kubenswrapper[4710]: E1009 09:06:24.815281 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:24 crc kubenswrapper[4710]: E1009 09:06:24.815243 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.901424 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.901466 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.901475 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.901486 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:24 crc kubenswrapper[4710]: I1009 09:06:24.901496 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:24Z","lastTransitionTime":"2025-10-09T09:06:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.003712 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.003752 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.003762 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.003774 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.003785 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.105815 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.105837 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.105846 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.105855 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.105862 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.207141 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.207169 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.207177 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.207187 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.207194 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.309267 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.309298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.309307 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.309336 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.309345 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.410892 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.410916 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.410924 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.410935 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.410942 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.512780 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.512807 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.512834 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.512845 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.512854 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.614846 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.614876 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.614884 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.614896 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.614904 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.716973 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.717018 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.717027 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.717040 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.717048 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.818771 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.818803 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.818811 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.818821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.818829 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.920937 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.920968 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.920978 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.920990 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:25 crc kubenswrapper[4710]: I1009 09:06:25.920999 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:25Z","lastTransitionTime":"2025-10-09T09:06:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.023062 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.023101 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.023111 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.023125 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.023134 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.124675 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.124820 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.124918 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.125093 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.125249 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.227349 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.227378 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.227386 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.227396 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.227404 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.330298 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.330378 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.330391 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.330405 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.330423 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.432539 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.432649 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.432717 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.432773 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.432822 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.534706 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.535036 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.535124 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.535184 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.535267 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.637729 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.637786 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.637799 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.637821 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.637842 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.739670 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.739700 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.739711 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.739725 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.739736 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.760049 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.760077 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.760088 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.760101 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.760111 4710 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-09T09:06:26Z","lastTransitionTime":"2025-10-09T09:06:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.794123 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94"] Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.794540 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.795847 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.796056 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.796920 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.796962 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.814065 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.814122 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.814419 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:26 crc kubenswrapper[4710]: E1009 09:06:26.814525 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:26 crc kubenswrapper[4710]: E1009 09:06:26.814412 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.814455 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:26 crc kubenswrapper[4710]: E1009 09:06:26.814752 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:26 crc kubenswrapper[4710]: E1009 09:06:26.815062 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.819369 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=47.81935333 podStartE2EDuration="47.81935333s" podCreationTimestamp="2025-10-09 09:05:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.807351094 +0000 UTC m=+110.297459491" watchObservedRunningTime="2025-10-09 09:06:26.81935333 +0000 UTC m=+110.309461727" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.819508 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-5c9mg" podStartSLOduration=89.819504285 podStartE2EDuration="1m29.819504285s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.818827629 +0000 UTC m=+110.308936026" watchObservedRunningTime="2025-10-09 09:06:26.819504285 +0000 UTC m=+110.309612682" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.830381 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-l4vw4" podStartSLOduration=89.830362505 podStartE2EDuration="1m29.830362505s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.829996124 +0000 UTC m=+110.320104521" watchObservedRunningTime="2025-10-09 09:06:26.830362505 +0000 UTC m=+110.320470902" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.864671 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podStartSLOduration=89.86464201 podStartE2EDuration="1m29.86464201s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.864637261 +0000 UTC m=+110.354745658" watchObservedRunningTime="2025-10-09 09:06:26.86464201 +0000 UTC m=+110.354750407" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.884989 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.885033 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.885071 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.885117 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.885320 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.887619 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-hrfgb" podStartSLOduration=89.887603507 podStartE2EDuration="1m29.887603507s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.872023144 +0000 UTC m=+110.362131541" watchObservedRunningTime="2025-10-09 09:06:26.887603507 +0000 UTC m=+110.377711904" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.887798 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=92.887793816 podStartE2EDuration="1m32.887793816s" podCreationTimestamp="2025-10-09 09:04:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.887414802 +0000 UTC m=+110.377523199" watchObservedRunningTime="2025-10-09 09:06:26.887793816 +0000 UTC m=+110.377902212" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.919901 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-zzrnh" podStartSLOduration=89.91988767 podStartE2EDuration="1m29.91988767s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.919673065 +0000 UTC m=+110.409781463" watchObservedRunningTime="2025-10-09 09:06:26.91988767 +0000 UTC m=+110.409996067" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.928037 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-b9p6j" podStartSLOduration=88.928028918 
podStartE2EDuration="1m28.928028918s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.927473261 +0000 UTC m=+110.417581657" watchObservedRunningTime="2025-10-09 09:06:26.928028918 +0000 UTC m=+110.418137315" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.951857 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=89.951842342 podStartE2EDuration="1m29.951842342s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.950671655 +0000 UTC m=+110.440780052" watchObservedRunningTime="2025-10-09 09:06:26.951842342 +0000 UTC m=+110.441950739" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.965014 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=59.964993695 podStartE2EDuration="59.964993695s" podCreationTimestamp="2025-10-09 09:05:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.964602457 +0000 UTC m=+110.454710854" watchObservedRunningTime="2025-10-09 09:06:26.964993695 +0000 UTC m=+110.455102092" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.984672 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=25.984653357 podStartE2EDuration="25.984653357s" podCreationTimestamp="2025-10-09 09:06:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:26.983073749 +0000 UTC m=+110.473182146" watchObservedRunningTime="2025-10-09 09:06:26.984653357 +0000 UTC m=+110.474761754" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.986614 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.986733 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.986882 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.986973 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" 
(UniqueName: \"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.987074 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.987158 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.987082 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.987807 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-service-ca\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:26 crc kubenswrapper[4710]: I1009 09:06:26.992194 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:27 crc kubenswrapper[4710]: I1009 09:06:27.000687 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/378a461d-1cc4-4ddd-a2e0-1578ab6b9334-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-r5m94\" (UID: \"378a461d-1cc4-4ddd-a2e0-1578ab6b9334\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:27 crc kubenswrapper[4710]: I1009 09:06:27.105028 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" Oct 09 09:06:27 crc kubenswrapper[4710]: I1009 09:06:27.173241 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" event={"ID":"378a461d-1cc4-4ddd-a2e0-1578ab6b9334","Type":"ContainerStarted","Data":"ab0e634295a62e99f9fe734cd02e6aa50865e76609060987f962784ec955f640"} Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.176082 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" event={"ID":"378a461d-1cc4-4ddd-a2e0-1578ab6b9334","Type":"ContainerStarted","Data":"f4a03d11faca5cf3f6dba04db086890ea2302ea02675e5138208299aee3ad100"} Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.186706 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-r5m94" podStartSLOduration=91.186693179 podStartE2EDuration="1m31.186693179s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:28.18654997 +0000 UTC m=+111.676658367" watchObservedRunningTime="2025-10-09 09:06:28.186693179 +0000 UTC m=+111.676801577" Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.814411 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.814481 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.814585 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:28 crc kubenswrapper[4710]: E1009 09:06:28.814632 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:28 crc kubenswrapper[4710]: E1009 09:06:28.814802 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:28 crc kubenswrapper[4710]: E1009 09:06:28.814910 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:28 crc kubenswrapper[4710]: I1009 09:06:28.815201 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:28 crc kubenswrapper[4710]: E1009 09:06:28.815308 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:30 crc kubenswrapper[4710]: I1009 09:06:30.814610 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:30 crc kubenswrapper[4710]: I1009 09:06:30.814670 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:30 crc kubenswrapper[4710]: E1009 09:06:30.814711 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:30 crc kubenswrapper[4710]: I1009 09:06:30.814730 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:30 crc kubenswrapper[4710]: I1009 09:06:30.814739 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:30 crc kubenswrapper[4710]: E1009 09:06:30.814789 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:30 crc kubenswrapper[4710]: E1009 09:06:30.814842 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:30 crc kubenswrapper[4710]: E1009 09:06:30.814900 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:31 crc kubenswrapper[4710]: I1009 09:06:31.814808 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.190519 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/1.log" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.190877 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/0.log" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.190914 4710 generic.go:334] "Generic (PLEG): container finished" podID="421bdfde-a7ad-4e4c-aa0d-624104899b94" containerID="5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9" exitCode=1 Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.190965 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerDied","Data":"5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9"} Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.190997 4710 scope.go:117] "RemoveContainer" containerID="c4de0c1e3cb1d5f11c239bbfcf982a64c6260c9b300a6781fa802c75987453c5" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.191320 4710 scope.go:117] "RemoveContainer" containerID="5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9" Oct 09 09:06:32 crc kubenswrapper[4710]: E1009 09:06:32.191479 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-5c9mg_openshift-multus(421bdfde-a7ad-4e4c-aa0d-624104899b94)\"" pod="openshift-multus/multus-5c9mg" podUID="421bdfde-a7ad-4e4c-aa0d-624104899b94" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.193663 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/3.log" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.195683 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerStarted","Data":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.196117 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.224696 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podStartSLOduration=94.224681434 podStartE2EDuration="1m34.224681434s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:06:32.224026609 +0000 UTC m=+115.714135006" watchObservedRunningTime="2025-10-09 09:06:32.224681434 +0000 UTC m=+115.714789830" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.432666 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-p9sh6"] Oct 09 09:06:32 crc kubenswrapper[4710]: 
I1009 09:06:32.432760 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:32 crc kubenswrapper[4710]: E1009 09:06:32.432846 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.814256 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.814351 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:32 crc kubenswrapper[4710]: I1009 09:06:32.814369 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:32 crc kubenswrapper[4710]: E1009 09:06:32.814547 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:32 crc kubenswrapper[4710]: E1009 09:06:32.814540 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:32 crc kubenswrapper[4710]: E1009 09:06:32.814632 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:33 crc kubenswrapper[4710]: I1009 09:06:33.198790 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/1.log" Oct 09 09:06:34 crc kubenswrapper[4710]: I1009 09:06:34.814512 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:34 crc kubenswrapper[4710]: I1009 09:06:34.814574 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:34 crc kubenswrapper[4710]: I1009 09:06:34.814584 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:34 crc kubenswrapper[4710]: E1009 09:06:34.814653 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:34 crc kubenswrapper[4710]: I1009 09:06:34.814820 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:34 crc kubenswrapper[4710]: E1009 09:06:34.814878 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:34 crc kubenswrapper[4710]: E1009 09:06:34.815005 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:34 crc kubenswrapper[4710]: E1009 09:06:34.815110 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:36 crc kubenswrapper[4710]: I1009 09:06:36.814482 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:36 crc kubenswrapper[4710]: I1009 09:06:36.814481 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:36 crc kubenswrapper[4710]: I1009 09:06:36.814519 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:36 crc kubenswrapper[4710]: I1009 09:06:36.814497 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.815292 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.815425 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.815488 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.815527 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.817368 4710 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 09 09:06:36 crc kubenswrapper[4710]: E1009 09:06:36.880543 4710 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 09:06:38 crc kubenswrapper[4710]: I1009 09:06:38.814902 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:38 crc kubenswrapper[4710]: E1009 09:06:38.815206 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:38 crc kubenswrapper[4710]: I1009 09:06:38.815252 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:38 crc kubenswrapper[4710]: I1009 09:06:38.815281 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:38 crc kubenswrapper[4710]: E1009 09:06:38.815365 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:38 crc kubenswrapper[4710]: E1009 09:06:38.815416 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:38 crc kubenswrapper[4710]: I1009 09:06:38.815475 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:38 crc kubenswrapper[4710]: E1009 09:06:38.815530 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:40 crc kubenswrapper[4710]: I1009 09:06:40.814181 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:40 crc kubenswrapper[4710]: I1009 09:06:40.814240 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:40 crc kubenswrapper[4710]: E1009 09:06:40.814480 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:40 crc kubenswrapper[4710]: I1009 09:06:40.814261 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:40 crc kubenswrapper[4710]: I1009 09:06:40.814240 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:40 crc kubenswrapper[4710]: E1009 09:06:40.814556 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:40 crc kubenswrapper[4710]: E1009 09:06:40.814603 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:40 crc kubenswrapper[4710]: E1009 09:06:40.814664 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:41 crc kubenswrapper[4710]: E1009 09:06:41.882070 4710 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 09:06:42 crc kubenswrapper[4710]: I1009 09:06:42.814314 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:42 crc kubenswrapper[4710]: I1009 09:06:42.814338 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:42 crc kubenswrapper[4710]: I1009 09:06:42.814357 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:42 crc kubenswrapper[4710]: E1009 09:06:42.814486 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:42 crc kubenswrapper[4710]: E1009 09:06:42.814562 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:42 crc kubenswrapper[4710]: E1009 09:06:42.814628 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:42 crc kubenswrapper[4710]: I1009 09:06:42.814692 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:42 crc kubenswrapper[4710]: E1009 09:06:42.814807 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:44 crc kubenswrapper[4710]: I1009 09:06:44.814815 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:44 crc kubenswrapper[4710]: I1009 09:06:44.814841 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:44 crc kubenswrapper[4710]: E1009 09:06:44.814928 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:44 crc kubenswrapper[4710]: I1009 09:06:44.814952 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:44 crc kubenswrapper[4710]: E1009 09:06:44.815034 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:44 crc kubenswrapper[4710]: E1009 09:06:44.815104 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:44 crc kubenswrapper[4710]: I1009 09:06:44.815417 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:44 crc kubenswrapper[4710]: E1009 09:06:44.815496 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:45 crc kubenswrapper[4710]: I1009 09:06:45.814087 4710 scope.go:117] "RemoveContainer" containerID="5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9" Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.227967 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/1.log" Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.228166 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerStarted","Data":"40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c"} Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.814329 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.814388 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.814402 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:46 crc kubenswrapper[4710]: I1009 09:06:46.815153 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:46 crc kubenswrapper[4710]: E1009 09:06:46.815150 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:46 crc kubenswrapper[4710]: E1009 09:06:46.815216 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:46 crc kubenswrapper[4710]: E1009 09:06:46.815300 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:46 crc kubenswrapper[4710]: E1009 09:06:46.815349 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:46 crc kubenswrapper[4710]: E1009 09:06:46.882396 4710 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 09:06:48 crc kubenswrapper[4710]: I1009 09:06:48.814447 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:48 crc kubenswrapper[4710]: I1009 09:06:48.814998 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:48 crc kubenswrapper[4710]: I1009 09:06:48.815135 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:48 crc kubenswrapper[4710]: I1009 09:06:48.815189 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:48 crc kubenswrapper[4710]: E1009 09:06:48.815219 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:48 crc kubenswrapper[4710]: E1009 09:06:48.815138 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:48 crc kubenswrapper[4710]: E1009 09:06:48.815424 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:48 crc kubenswrapper[4710]: E1009 09:06:48.815494 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:50 crc kubenswrapper[4710]: I1009 09:06:50.814511 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:50 crc kubenswrapper[4710]: I1009 09:06:50.814581 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:50 crc kubenswrapper[4710]: E1009 09:06:50.814598 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 09 09:06:50 crc kubenswrapper[4710]: I1009 09:06:50.814672 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:50 crc kubenswrapper[4710]: E1009 09:06:50.814688 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 09 09:06:50 crc kubenswrapper[4710]: I1009 09:06:50.814711 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:50 crc kubenswrapper[4710]: E1009 09:06:50.814770 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-p9sh6" podUID="b237d61d-3d37-4b76-afa3-d5fe7119b0b6" Oct 09 09:06:50 crc kubenswrapper[4710]: E1009 09:06:50.814820 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.814184 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.814247 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.814314 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.814316 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.816101 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.816486 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.816501 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.816966 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.817714 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 09 09:06:52 crc kubenswrapper[4710]: I1009 09:06:52.817800 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.339807 4710 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.362570 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8598s"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.363002 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.369867 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.369887 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370057 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370201 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370242 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370335 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370462 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370566 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370574 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370884 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.370899 4710 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k94j"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.371501 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.371833 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.372044 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.373037 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.373055 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.373560 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.375054 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.375330 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.376975 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7h4pp"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.377277 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.378968 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.379239 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.379614 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.379981 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.380181 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.381633 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400568 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400608 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400613 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400570 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400681 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400696 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.400970 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.401317 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.401945 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.403857 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.404190 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.404203 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.404239 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.404293 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.404759 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.405846 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.406400 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-69jvv"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.406821 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.408399 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.408523 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409898 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409930 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409963 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409993 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409964 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409997 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.409931 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.410037 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.410071 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.410135 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.410260 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.411320 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.412274 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.413246 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.413255 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.414069 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 
09:06:57.414193 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.414424 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2wlbg"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.414848 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.415000 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.415128 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.415170 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.415131 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417144 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417201 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417370 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417383 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417406 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417376 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417494 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417531 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417561 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417445 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417639 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417662 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 
09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417688 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417718 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417733 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.417763 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.418051 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.418377 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.418732 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.418908 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.419056 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.419290 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.419553 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.419729 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.419908 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.420148 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.426877 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.430811 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.434821 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435245 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435514 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435528 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-bmqbd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435584 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435717 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.435785 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.436308 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.437904 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-48qbf"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.438538 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.438794 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.439389 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.441884 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.442150 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.444009 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.444119 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.444864 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.445335 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.445443 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.449863 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.450177 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.450493 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.450755 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.450919 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451045 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451313 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451400 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451499 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451566 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451653 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451727 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451828 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451923 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451329 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451355 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.452965 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.451379 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.453086 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.453208 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.453361 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.453621 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.453752 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-qp498"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.454034 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.454759 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.455042 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.456613 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.464289 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.464143 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.469004 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.490918 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.492244 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.492640 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493065 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493353 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qdh9l"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493749 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493355 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493902 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.493994 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.494112 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.494442 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.494754 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.494846 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.494987 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k94j"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.496366 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.496789 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.496963 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.497041 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.497397 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.497051 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.497712 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.498104 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.498817 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.499202 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.499253 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.500500 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.500555 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.501792 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98nmz"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.505979 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506337 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9lzhs"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506521 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506524 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506833 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506859 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8598s"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.506869 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.507012 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.507662 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.507840 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-48qbf"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.508882 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ckn5c"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.509450 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-ckn5c" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.510041 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7h4pp"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.512473 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.512498 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.514486 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.514864 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.516069 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.516187 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-encryption-config\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517494 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jxfg\" (UniqueName: \"kubernetes.io/projected/53565bc1-575c-4410-aaea-f6016117621f-kube-api-access-4jxfg\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517519 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517536 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-auth-proxy-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517549 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb7kr\" (UniqueName: \"kubernetes.io/projected/16efdb77-5db5-410b-9f59-aed6293dbcab-kube-api-access-mb7kr\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 
09:06:57.517574 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gj84\" (UniqueName: \"kubernetes.io/projected/d682c234-918c-4189-91bb-09e90f9da4db-kube-api-access-5gj84\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517590 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517603 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-config\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517617 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-policies\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517631 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517644 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bbk7\" (UniqueName: \"kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517659 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517672 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517685 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-config\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517698 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsbmm\" (UniqueName: \"kubernetes.io/projected/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-kube-api-access-rsbmm\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517720 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrxrv\" (UniqueName: \"kubernetes.io/projected/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-kube-api-access-lrxrv\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517735 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-service-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517749 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9szvm\" (UniqueName: \"kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517767 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517784 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-client\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517799 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517819 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517831 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-node-pullsecrets\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517843 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517857 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517870 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s5rk\" (UniqueName: \"kubernetes.io/projected/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-kube-api-access-7s5rk\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517884 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517897 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-serving-cert\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517915 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517931 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517945 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517957 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517970 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517984 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-encryption-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.517999 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518013 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0e1068-6e20-498c-a8b0-c61513824a86-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518028 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518041 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518054 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l4md\" (UniqueName: \"kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518070 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q5rc\" (UniqueName: \"kubernetes.io/projected/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-kube-api-access-4q5rc\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518082 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518103 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d682c234-918c-4189-91bb-09e90f9da4db-metrics-tls\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518117 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-images\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518129 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3193888-6214-44cb-a0bc-0091046b80c2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518142 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518155 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9bxg\" (UniqueName: 
\"kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518168 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-serving-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518179 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit-dir\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518191 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518205 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-client\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518246 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518259 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518274 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53565bc1-575c-4410-aaea-f6016117621f-serving-cert\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518289 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gcdm\" (UniqueName: \"kubernetes.io/projected/c3193888-6214-44cb-a0bc-0091046b80c2-kube-api-access-7gcdm\") 
pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518303 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518315 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518329 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518342 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-serving-cert\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518353 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518375 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518384 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-69jvv"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518355 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518580 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518599 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzfm8\" (UniqueName: \"kubernetes.io/projected/4e0e1068-6e20-498c-a8b0-c61513824a86-kube-api-access-zzfm8\") pod 
\"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518614 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-config\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518627 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518664 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-dir\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518679 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518692 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518707 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-trusted-ca\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518720 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/16efdb77-5db5-410b-9f59-aed6293dbcab-machine-approver-tls\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518741 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-serving-cert\") pod \"apiserver-76f77b778f-8598s\" 
(UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518756 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-image-import-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518776 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518791 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0e1068-6e20-498c-a8b0-c61513824a86-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.518815 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.520351 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.522007 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.522788 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.523815 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.525468 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2wlbg"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.525581 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.526695 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.527896 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.528290 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.529655 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.529909 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ckn5c"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.537469 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.538832 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vzjfq"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.539872 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.542532 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-wng2k"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.543229 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.543413 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.545310 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9lzhs"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.549553 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.550336 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.551284 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.552149 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-qdh9l"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.553191 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.554202 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bmqbd"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.555193 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vzjfq"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.557099 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.558088 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98nmz"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.558834 4710 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.559051 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.564221 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.580047 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nnk8n"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.581931 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.585004 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.587273 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nnk8n"] Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.604220 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619266 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619298 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619358 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619377 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-proxy-tls\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619393 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619408 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-serving-cert\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619422 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619452 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-config\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619467 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlxgs\" (UniqueName: \"kubernetes.io/projected/5edaffb3-e33b-45b9-89fa-39322022da37-kube-api-access-mlxgs\") pod \"downloads-7954f5f757-bmqbd\" (UID: \"5edaffb3-e33b-45b9-89fa-39322022da37\") " pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619481 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-trusted-ca\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619494 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-serving-cert\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619509 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-dir\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619523 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9kz2\" (UniqueName: \"kubernetes.io/projected/862286d9-08dd-4330-99ed-04e3b17f2a5b-kube-api-access-l9kz2\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619539 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619552 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619567 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf39a416-5cb7-4d33-b794-09fb70b25f4a-proxy-tls\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619582 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619596 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb7kr\" (UniqueName: \"kubernetes.io/projected/16efdb77-5db5-410b-9f59-aed6293dbcab-kube-api-access-mb7kr\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619611 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619624 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-config\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619640 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcbcf7f-d600-4279-a14f-edf6226c25e2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619655 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619669 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bbk7\" (UniqueName: \"kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619682 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-policies\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619696 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-config\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619711 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsbmm\" (UniqueName: \"kubernetes.io/projected/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-kube-api-access-rsbmm\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619717 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619729 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrxrv\" (UniqueName: \"kubernetes.io/projected/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-kube-api-access-lrxrv\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619872 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-service-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619893 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9szvm\" (UniqueName: \"kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 
09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619954 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-dir\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.619963 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620025 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-client\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620048 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620068 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3efa77-9487-4940-a894-e8a10f0c9453-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620085 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620108 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620126 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flm7p\" (UniqueName: \"kubernetes.io/projected/1dcbcf7f-d600-4279-a14f-edf6226c25e2-kube-api-access-flm7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc 
kubenswrapper[4710]: I1009 09:06:57.620144 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620161 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s5rk\" (UniqueName: \"kubernetes.io/projected/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-kube-api-access-7s5rk\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620180 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aae1458-34de-4abf-b57e-8d3aefaeb644-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620196 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26bnb\" (UniqueName: \"kubernetes.io/projected/2b5de9fd-b25e-4062-9492-71eb94e19a44-kube-api-access-26bnb\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620229 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3efa77-9487-4940-a894-e8a10f0c9453-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620260 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620276 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-serving-cert\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620291 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620305 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620322 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620345 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620359 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rscbr\" (UniqueName: \"kubernetes.io/projected/6907d5b6-8950-45a6-bac5-2bc61c0d8427-kube-api-access-rscbr\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620375 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620392 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0e1068-6e20-498c-a8b0-c61513824a86-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620407 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df55607e-2c97-4bd5-b3e0-3a748c6482ef-trusted-ca\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620420 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620449 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620465 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l4md\" (UniqueName: \"kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620482 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5845k\" (UniqueName: \"kubernetes.io/projected/778aaf63-ea06-45a1-b031-efb3809bf0ec-kube-api-access-5845k\") pod \"migrator-59844c95c7-vxwmd\" (UID: \"778aaf63-ea06-45a1-b031-efb3809bf0ec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620496 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620519 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620534 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-stats-auth\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620550 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-images\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620566 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9bxg\" (UniqueName: \"kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620579 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0ab392-4bad-4a96-b6cc-14b706777850-config\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: 
\"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620594 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620609 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-client\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620623 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aae1458-34de-4abf-b57e-8d3aefaeb644-config\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620639 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620656 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53565bc1-575c-4410-aaea-f6016117621f-serving-cert\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfctl\" (UniqueName: \"kubernetes.io/projected/4f054e02-d261-4d4e-9333-b7c469374c24-kube-api-access-dfctl\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620687 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620702 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f821a1-ba36-423a-b20b-82dc307a8c22-serving-cert\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620716 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcjmw\" (UniqueName: \"kubernetes.io/projected/6e022686-4480-4610-9760-e1487bb99265-kube-api-access-qcjmw\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620731 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620746 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzfm8\" (UniqueName: \"kubernetes.io/projected/4e0e1068-6e20-498c-a8b0-c61513824a86-kube-api-access-zzfm8\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620762 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620777 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620793 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620806 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620821 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/16efdb77-5db5-410b-9f59-aed6293dbcab-machine-approver-tls\") pod \"machine-approver-56656f9798-wt2d2\" (UID: 
\"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620836 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e022686-4480-4610-9760-e1487bb99265-service-ca-bundle\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620851 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-image-import-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620865 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-service-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620880 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0e1068-6e20-498c-a8b0-c61513824a86-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620909 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620923 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz68r\" (UniqueName: \"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-kube-api-access-nz68r\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620938 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dcbcf7f-d600-4279-a14f-edf6226c25e2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 
09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620953 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-encryption-config\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620954 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-service-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620968 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jxfg\" (UniqueName: \"kubernetes.io/projected/53565bc1-575c-4410-aaea-f6016117621f-kube-api-access-4jxfg\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621018 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zq52\" (UniqueName: \"kubernetes.io/projected/cf39a416-5cb7-4d33-b794-09fb70b25f4a-kube-api-access-9zq52\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621057 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-auth-proxy-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621075 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-metrics-certs\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621090 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-config\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621108 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gj84\" (UniqueName: \"kubernetes.io/projected/d682c234-918c-4189-91bb-09e90f9da4db-kube-api-access-5gj84\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621125 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621140 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621161 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-default-certificate\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621178 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4jzh\" (UniqueName: \"kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621196 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a0ab392-4bad-4a96-b6cc-14b706777850-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621211 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgkhs\" (UniqueName: \"kubernetes.io/projected/d3f821a1-ba36-423a-b20b-82dc307a8c22-kube-api-access-xgkhs\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621247 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621262 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtlxf\" (UniqueName: \"kubernetes.io/projected/7af80181-f4a1-4112-8792-87f958d2f22e-kube-api-access-gtlxf\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621280 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621296 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621315 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvfh9\" (UniqueName: \"kubernetes.io/projected/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-kube-api-access-lvfh9\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621326 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621333 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621314 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.620608 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-config\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621641 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-config\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621755 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-node-pullsecrets\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc 
kubenswrapper[4710]: I1009 09:06:57.621775 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-serving-cert\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621792 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621808 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f821a1-ba36-423a-b20b-82dc307a8c22-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621824 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621838 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aae1458-34de-4abf-b57e-8d3aefaeb644-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621855 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkpbt\" (UniqueName: \"kubernetes.io/projected/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-kube-api-access-dkpbt\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621874 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621890 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-images\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: 
I1009 09:06:57.621905 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df55607e-2c97-4bd5-b3e0-3a748c6482ef-metrics-tls\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621922 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-encryption-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621954 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3efa77-9487-4940-a894-e8a10f0c9453-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621968 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621984 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.621998 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6907d5b6-8950-45a6-bac5-2bc61c0d8427-tmpfs\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622013 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-client\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622029 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622044 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-4q5rc\" (UniqueName: \"kubernetes.io/projected/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-kube-api-access-4q5rc\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622061 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-bound-sa-token\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622074 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a0ab392-4bad-4a96-b6cc-14b706777850-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622082 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-auth-proxy-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622091 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d682c234-918c-4189-91bb-09e90f9da4db-metrics-tls\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622108 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3193888-6214-44cb-a0bc-0091046b80c2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622124 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit-dir\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622138 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622153 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session\") pod 
\"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622168 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-serving-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622194 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622210 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsdrs\" (UniqueName: \"kubernetes.io/projected/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-kube-api-access-wsdrs\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622238 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gcdm\" (UniqueName: \"kubernetes.io/projected/c3193888-6214-44cb-a0bc-0091046b80c2-kube-api-access-7gcdm\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622292 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-trusted-ca\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622314 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622409 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622737 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622776 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.622882 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.623329 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-config\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.624051 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.624814 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-audit-policies\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.625177 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16efdb77-5db5-410b-9f59-aed6293dbcab-config\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.625910 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e0e1068-6e20-498c-a8b0-c61513824a86-config\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.625926 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.626023 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 
09:06:57.626021 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-image-import-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.626649 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.626985 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.627364 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/16efdb77-5db5-410b-9f59-aed6293dbcab-machine-approver-tls\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.627369 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-serving-cert\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.627628 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.627781 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.627949 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628012 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" 
Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.629812 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628052 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-node-pullsecrets\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628109 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-etcd-client\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628278 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628480 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628705 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-audit-dir\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.629329 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-serving-cert\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.628033 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.623336 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.630766 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e0e1068-6e20-498c-a8b0-c61513824a86-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.631155 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/53565bc1-575c-4410-aaea-f6016117621f-serving-cert\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.631171 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.631346 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.631497 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53565bc1-575c-4410-aaea-f6016117621f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.631894 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-serving-ca\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.632147 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.632179 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-etcd-client\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.632824 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-lgdvx\" 
(UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.633148 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3193888-6214-44cb-a0bc-0091046b80c2-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.634398 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.634991 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635008 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/c3193888-6214-44cb-a0bc-0091046b80c2-images\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635319 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d682c234-918c-4189-91bb-09e90f9da4db-metrics-tls\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635394 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-encryption-config\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635625 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-serving-cert\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635665 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-encryption-config\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635768 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.635916 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.636049 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.636098 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.637480 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.644625 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.664145 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.685687 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.704324 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722811 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722846 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722865 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3efa77-9487-4940-a894-e8a10f0c9453-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722880 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flm7p\" (UniqueName: \"kubernetes.io/projected/1dcbcf7f-d600-4279-a14f-edf6226c25e2-kube-api-access-flm7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722916 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aae1458-34de-4abf-b57e-8d3aefaeb644-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722930 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26bnb\" (UniqueName: \"kubernetes.io/projected/2b5de9fd-b25e-4062-9492-71eb94e19a44-kube-api-access-26bnb\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722945 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3efa77-9487-4940-a894-e8a10f0c9453-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722965 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-serving-cert\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.722989 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723003 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rscbr\" (UniqueName: \"kubernetes.io/projected/6907d5b6-8950-45a6-bac5-2bc61c0d8427-kube-api-access-rscbr\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723019 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df55607e-2c97-4bd5-b3e0-3a748c6482ef-trusted-ca\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723031 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723049 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5845k\" (UniqueName: \"kubernetes.io/projected/778aaf63-ea06-45a1-b031-efb3809bf0ec-kube-api-access-5845k\") pod \"migrator-59844c95c7-vxwmd\" (UID: \"778aaf63-ea06-45a1-b031-efb3809bf0ec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723064 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723083 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-stats-auth\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723102 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0ab392-4bad-4a96-b6cc-14b706777850-config\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723115 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723129 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aae1458-34de-4abf-b57e-8d3aefaeb644-config\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723144 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfctl\" (UniqueName: \"kubernetes.io/projected/4f054e02-d261-4d4e-9333-b7c469374c24-kube-api-access-dfctl\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723159 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723174 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f821a1-ba36-423a-b20b-82dc307a8c22-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723188 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcjmw\" (UniqueName: \"kubernetes.io/projected/6e022686-4480-4610-9760-e1487bb99265-kube-api-access-qcjmw\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723207 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723239 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e022686-4480-4610-9760-e1487bb99265-service-ca-bundle\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723254 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-service-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723272 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz68r\" (UniqueName: 
\"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-kube-api-access-nz68r\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723286 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dcbcf7f-d600-4279-a14f-edf6226c25e2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723304 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zq52\" (UniqueName: \"kubernetes.io/projected/cf39a416-5cb7-4d33-b794-09fb70b25f4a-kube-api-access-9zq52\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723326 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-metrics-certs\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723342 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-config\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723360 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-default-certificate\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723374 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4jzh\" (UniqueName: \"kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723389 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a0ab392-4bad-4a96-b6cc-14b706777850-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723403 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgkhs\" (UniqueName: \"kubernetes.io/projected/d3f821a1-ba36-423a-b20b-82dc307a8c22-kube-api-access-xgkhs\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723418 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtlxf\" (UniqueName: \"kubernetes.io/projected/7af80181-f4a1-4112-8792-87f958d2f22e-kube-api-access-gtlxf\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723461 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvfh9\" (UniqueName: \"kubernetes.io/projected/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-kube-api-access-lvfh9\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723477 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723491 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f821a1-ba36-423a-b20b-82dc307a8c22-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723507 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723520 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aae1458-34de-4abf-b57e-8d3aefaeb644-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723535 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkpbt\" (UniqueName: \"kubernetes.io/projected/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-kube-api-access-dkpbt\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723550 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-images\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723564 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df55607e-2c97-4bd5-b3e0-3a748c6482ef-metrics-tls\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723577 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3efa77-9487-4940-a894-e8a10f0c9453-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723592 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723606 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723621 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6907d5b6-8950-45a6-bac5-2bc61c0d8427-tmpfs\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723635 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-client\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723652 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-bound-sa-token\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723665 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a0ab392-4bad-4a96-b6cc-14b706777850-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723693 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-wsdrs\" (UniqueName: \"kubernetes.io/projected/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-kube-api-access-wsdrs\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723713 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723727 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-proxy-tls\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723744 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlxgs\" (UniqueName: \"kubernetes.io/projected/5edaffb3-e33b-45b9-89fa-39322022da37-kube-api-access-mlxgs\") pod \"downloads-7954f5f757-bmqbd\" (UID: \"5edaffb3-e33b-45b9-89fa-39322022da37\") " pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723760 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9kz2\" (UniqueName: \"kubernetes.io/projected/862286d9-08dd-4330-99ed-04e3b17f2a5b-kube-api-access-l9kz2\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723774 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723788 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf39a416-5cb7-4d33-b794-09fb70b25f4a-proxy-tls\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723800 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723821 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcbcf7f-d600-4279-a14f-edf6226c25e2-config\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.723875 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.724003 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.724126 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-config\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.724205 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6907d5b6-8950-45a6-bac5-2bc61c0d8427-tmpfs\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.724693 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.724916 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.725232 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a0ab392-4bad-4a96-b6cc-14b706777850-config\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.725816 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-serving-cert\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.727026 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a0ab392-4bad-4a96-b6cc-14b706777850-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.727145 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-client\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.744022 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.764271 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.784762 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.795931 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3f821a1-ba36-423a-b20b-82dc307a8c22-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.804757 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.815523 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3f821a1-ba36-423a-b20b-82dc307a8c22-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.825103 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.844571 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.864003 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.876503 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c3efa77-9487-4940-a894-e8a10f0c9453-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.884399 4710 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.894856 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c3efa77-9487-4940-a894-e8a10f0c9453-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.904705 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.917058 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-proxy-tls\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.924675 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.935168 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/7af80181-f4a1-4112-8792-87f958d2f22e-etcd-service-ca\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.944306 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.964845 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.976156 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aae1458-34de-4abf-b57e-8d3aefaeb644-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:57 crc kubenswrapper[4710]: I1009 09:06:57.984233 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.004928 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.014713 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aae1458-34de-4abf-b57e-8d3aefaeb644-config\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.024776 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 09 09:06:58 crc 
kubenswrapper[4710]: I1009 09:06:58.044250 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.064815 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.077035 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-default-certificate\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.084711 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.096849 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-stats-auth\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.104351 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.116460 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e022686-4480-4610-9760-e1487bb99265-metrics-certs\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.123972 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.134795 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6e022686-4480-4610-9760-e1487bb99265-service-ca-bundle\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.144455 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.164941 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.184113 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.204597 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.244632 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.254956 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/cf39a416-5cb7-4d33-b794-09fb70b25f4a-images\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.264941 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.274983 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dcbcf7f-d600-4279-a14f-edf6226c25e2-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.284698 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.304462 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.324317 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.326472 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cf39a416-5cb7-4d33-b794-09fb70b25f4a-proxy-tls\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.345115 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.364830 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.375633 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dcbcf7f-d600-4279-a14f-edf6226c25e2-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.384510 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.404713 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.425255 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.444715 4710 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-operator"/"metrics-tls" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.457043 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df55607e-2c97-4bd5-b3e0-3a748c6482ef-metrics-tls\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.468818 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.474351 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df55607e-2c97-4bd5-b3e0-3a748c6482ef-trusted-ca\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.484051 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.503587 4710 request.go:700] Waited for 1.009146006s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.504485 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.524063 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.544612 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.564830 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.584189 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.604769 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.625492 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.644624 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.664739 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.684912 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 09 09:06:58 crc 
kubenswrapper[4710]: I1009 09:06:58.704341 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.716156 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.723984 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724025 4710 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724031 4710 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724051 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert podName:6907d5b6-8950-45a6-bac5-2bc61c0d8427 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224033512 +0000 UTC m=+142.714141919 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert") pod "packageserver-d55dfcdfc-4jb5d" (UID: "6907d5b6-8950-45a6-bac5-2bc61c0d8427") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724071 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724073 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle podName:4f054e02-d261-4d4e-9333-b7c469374c24 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224059762 +0000 UTC m=+142.714168159 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle") pod "service-ca-9c57cc56f-9lzhs" (UID: "4f054e02-d261-4d4e-9333-b7c469374c24") : failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724084 4710 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724091 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca podName:d4f9a31b-650d-46d5-b8d3-9176e2048beb nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.22408542 +0000 UTC m=+142.714193818 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca") pod "marketplace-operator-79b997595-7cjtd" (UID: "d4f9a31b-650d-46d5-b8d3-9176e2048beb") : failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724104 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert podName:862286d9-08dd-4330-99ed-04e3b17f2a5b nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224098746 +0000 UTC m=+142.714207142 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert") pod "olm-operator-6b444d44fb-dgsrr" (UID: "862286d9-08dd-4330-99ed-04e3b17f2a5b") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724114 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert podName:cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224109446 +0000 UTC m=+142.714217842 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert") pod "service-ca-operator-777779d784-98nmz" (UID: "cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724118 4710 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724073 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724138 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key podName:4f054e02-d261-4d4e-9333-b7c469374c24 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224129223 +0000 UTC m=+142.714237620 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key") pod "service-ca-9c57cc56f-9lzhs" (UID: "4f054e02-d261-4d4e-9333-b7c469374c24") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724151 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert podName:6907d5b6-8950-45a6-bac5-2bc61c0d8427 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224144451 +0000 UTC m=+142.714252848 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert") pod "packageserver-d55dfcdfc-4jb5d" (UID: "6907d5b6-8950-45a6-bac5-2bc61c0d8427") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724168 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724188 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert podName:862286d9-08dd-4330-99ed-04e3b17f2a5b nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224183484 +0000 UTC m=+142.714291882 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert") pod "olm-operator-6b444d44fb-dgsrr" (UID: "862286d9-08dd-4330-99ed-04e3b17f2a5b") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724206 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724232 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert podName:4d30e7c6-48d6-49dd-b6cc-f983d70eecd0 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224228109 +0000 UTC m=+142.714336506 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert") pod "catalog-operator-68c6474976-fbsvl" (UID: "4d30e7c6-48d6-49dd-b6cc-f983d70eecd0") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724261 4710 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724284 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config podName:cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224276921 +0000 UTC m=+142.714385317 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config") pod "service-ca-operator-777779d784-98nmz" (UID: "cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e") : failed to sync configmap cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724544 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724596 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert podName:2b5de9fd-b25e-4062-9492-71eb94e19a44 nodeName:}" failed. 
No retries permitted until 2025-10-09 09:06:59.224583709 +0000 UTC m=+142.714692116 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-5j8t6" (UID: "2b5de9fd-b25e-4062-9492-71eb94e19a44") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724618 4710 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: E1009 09:06:58.724649 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert podName:4d30e7c6-48d6-49dd-b6cc-f983d70eecd0 nodeName:}" failed. No retries permitted until 2025-10-09 09:06:59.224642038 +0000 UTC m=+142.714750445 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert") pod "catalog-operator-68c6474976-fbsvl" (UID: "4d30e7c6-48d6-49dd-b6cc-f983d70eecd0") : failed to sync secret cache: timed out waiting for the condition Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.729627 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.744620 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.764191 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.785145 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.804333 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.824642 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.844356 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.864602 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.884453 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.904546 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.924819 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.945145 4710 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.964312 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 09 09:06:58 crc kubenswrapper[4710]: I1009 09:06:58.984123 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.004909 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.024607 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.044449 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.065047 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.084904 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.105038 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.125138 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.144184 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.164136 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.184360 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.224134 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239537 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239571 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239593 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239633 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239662 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239733 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239750 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239780 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239799 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239846 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239872 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.239916 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.240666 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-config\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.240956 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.241470 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4f054e02-d261-4d4e-9333-b7c469374c24-signing-cabundle\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.242386 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4f054e02-d261-4d4e-9333-b7c469374c24-signing-key\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.242516 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-webhook-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.242556 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-profile-collector-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.243474 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.243600 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/862286d9-08dd-4330-99ed-04e3b17f2a5b-srv-cert\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.244129 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-serving-cert\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.244288 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.244513 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6907d5b6-8950-45a6-bac5-2bc61c0d8427-apiservice-cert\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.245241 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-srv-cert\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.245489 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b5de9fd-b25e-4062-9492-71eb94e19a44-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.265161 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.284602 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.304996 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.325075 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.344462 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.364609 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.384791 4710 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.406744 4710 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.438070 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrxrv\" (UniqueName: \"kubernetes.io/projected/ae732ae0-1a69-47f0-8895-a4a9ef61ae1a-kube-api-access-lrxrv\") pod \"console-operator-58897d9998-2wlbg\" (UID: \"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a\") " pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.454636 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb7kr\" (UniqueName: \"kubernetes.io/projected/16efdb77-5db5-410b-9f59-aed6293dbcab-kube-api-access-mb7kr\") pod \"machine-approver-56656f9798-wt2d2\" (UID: \"16efdb77-5db5-410b-9f59-aed6293dbcab\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.475634 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jxfg\" (UniqueName: \"kubernetes.io/projected/53565bc1-575c-4410-aaea-f6016117621f-kube-api-access-4jxfg\") pod \"authentication-operator-69f744f599-7h4pp\" (UID: \"53565bc1-575c-4410-aaea-f6016117621f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.496297 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9szvm\" (UniqueName: \"kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm\") pod \"route-controller-manager-6576b87f9c-5gtj2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.514593 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzfm8\" (UniqueName: \"kubernetes.io/projected/4e0e1068-6e20-498c-a8b0-c61513824a86-kube-api-access-zzfm8\") pod \"openshift-apiserver-operator-796bbdcf4f-dm7lm\" (UID: \"4e0e1068-6e20-498c-a8b0-c61513824a86\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.523547 4710 request.go:700] Waited for 1.901319792s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console/serviceaccounts/console/token Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.534718 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9bxg\" (UniqueName: \"kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg\") pod \"console-f9d7485db-5q44l\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.538415 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.553015 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.555225 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gcdm\" (UniqueName: \"kubernetes.io/projected/c3193888-6214-44cb-a0bc-0091046b80c2-kube-api-access-7gcdm\") pod \"machine-api-operator-5694c8668f-2k94j\" (UID: \"c3193888-6214-44cb-a0bc-0091046b80c2\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.577384 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gj84\" (UniqueName: \"kubernetes.io/projected/d682c234-918c-4189-91bb-09e90f9da4db-kube-api-access-5gj84\") pod \"dns-operator-744455d44c-69jvv\" (UID: \"d682c234-918c-4189-91bb-09e90f9da4db\") " pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.598030 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.599134 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bbk7\" (UniqueName: \"kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7\") pod \"controller-manager-879f6c89f-lgdvx\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.619200 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsbmm\" (UniqueName: \"kubernetes.io/projected/6fc18397-5fbe-41c5-a28a-eb550cc22dcd-kube-api-access-rsbmm\") pod \"cluster-samples-operator-665b6dd947-4zp57\" (UID: \"6fc18397-5fbe-41c5-a28a-eb550cc22dcd\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.629057 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.637057 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.647493 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s5rk\" (UniqueName: \"kubernetes.io/projected/dd0a0a80-0cd2-4efe-b988-8f06409dbfac-kube-api-access-7s5rk\") pod \"apiserver-76f77b778f-8598s\" (UID: \"dd0a0a80-0cd2-4efe-b988-8f06409dbfac\") " pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.650682 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.658251 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l4md\" (UniqueName: \"kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md\") pod \"oauth-openshift-558db77b4-7d642\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.674517 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-7h4pp"] Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.681933 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q5rc\" (UniqueName: \"kubernetes.io/projected/23aab9d2-bc11-496c-879b-14b3fe7d7dd7-kube-api-access-4q5rc\") pod \"apiserver-7bbb656c7d-v8np5\" (UID: \"23aab9d2-bc11-496c-879b-14b3fe7d7dd7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.697317 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flm7p\" (UniqueName: \"kubernetes.io/projected/1dcbcf7f-d600-4279-a14f-edf6226c25e2-kube-api-access-flm7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-xc46h\" (UID: \"1dcbcf7f-d600-4279-a14f-edf6226c25e2\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.717963 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26bnb\" (UniqueName: \"kubernetes.io/projected/2b5de9fd-b25e-4062-9492-71eb94e19a44-kube-api-access-26bnb\") pod \"package-server-manager-789f6589d5-5j8t6\" (UID: \"2b5de9fd-b25e-4062-9492-71eb94e19a44\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.727711 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.742017 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4jzh\" (UniqueName: \"kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh\") pod \"marketplace-operator-79b997595-7cjtd\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.755912 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz68r\" (UniqueName: \"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-kube-api-access-nz68r\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.765171 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.777066 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zq52\" (UniqueName: \"kubernetes.io/projected/cf39a416-5cb7-4d33-b794-09fb70b25f4a-kube-api-access-9zq52\") pod \"machine-config-operator-74547568cd-2zqzn\" (UID: \"cf39a416-5cb7-4d33-b794-09fb70b25f4a\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.784160 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.788421 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.792721 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.799999 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.800583 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.803793 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfctl\" (UniqueName: \"kubernetes.io/projected/4f054e02-d261-4d4e-9333-b7c469374c24-kube-api-access-dfctl\") pod \"service-ca-9c57cc56f-9lzhs\" (UID: \"4f054e02-d261-4d4e-9333-b7c469374c24\") " pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.808019 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.810366 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.817220 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcjmw\" (UniqueName: \"kubernetes.io/projected/6e022686-4480-4610-9760-e1487bb99265-kube-api-access-qcjmw\") pod \"router-default-5444994796-qp498\" (UID: \"6e022686-4480-4610-9760-e1487bb99265\") " pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.817444 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.822721 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.830794 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2wlbg"] Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.841302 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rscbr\" (UniqueName: \"kubernetes.io/projected/6907d5b6-8950-45a6-bac5-2bc61c0d8427-kube-api-access-rscbr\") pod \"packageserver-d55dfcdfc-4jb5d\" (UID: \"6907d5b6-8950-45a6-bac5-2bc61c0d8427\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.862803 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5845k\" (UniqueName: \"kubernetes.io/projected/778aaf63-ea06-45a1-b031-efb3809bf0ec-kube-api-access-5845k\") pod \"migrator-59844c95c7-vxwmd\" (UID: \"778aaf63-ea06-45a1-b031-efb3809bf0ec\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.869810 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-69jvv"] Oct 09 09:06:59 crc kubenswrapper[4710]: W1009 09:06:59.871848 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae732ae0_1a69_47f0_8895_a4a9ef61ae1a.slice/crio-9cdd61612277eed23050b13f28353bf2cfac03d72d84d7ca0579982a8ac29172 WatchSource:0}: Error finding container 9cdd61612277eed23050b13f28353bf2cfac03d72d84d7ca0579982a8ac29172: Status 404 returned error can't find the container with id 9cdd61612277eed23050b13f28353bf2cfac03d72d84d7ca0579982a8ac29172 Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.889898 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgkhs\" (UniqueName: \"kubernetes.io/projected/d3f821a1-ba36-423a-b20b-82dc307a8c22-kube-api-access-xgkhs\") pod \"openshift-controller-manager-operator-756b6f6bc6-f4bff\" (UID: \"d3f821a1-ba36-423a-b20b-82dc307a8c22\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.898227 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtlxf\" (UniqueName: \"kubernetes.io/projected/7af80181-f4a1-4112-8792-87f958d2f22e-kube-api-access-gtlxf\") pod \"etcd-operator-b45778765-48qbf\" (UID: \"7af80181-f4a1-4112-8792-87f958d2f22e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:59 crc kubenswrapper[4710]: W1009 09:06:59.905095 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd682c234_918c_4189_91bb_09e90f9da4db.slice/crio-90a47197319717d4bde3e5f9e1b34c409f6cb23ab3913ee0aa1bd136695643ff WatchSource:0}: Error finding container 90a47197319717d4bde3e5f9e1b34c409f6cb23ab3913ee0aa1bd136695643ff: Status 404 returned error can't find the container with id 90a47197319717d4bde3e5f9e1b34c409f6cb23ab3913ee0aa1bd136695643ff Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.908846 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.920298 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.923372 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvfh9\" (UniqueName: \"kubernetes.io/projected/4d30e7c6-48d6-49dd-b6cc-f983d70eecd0-kube-api-access-lvfh9\") pod \"catalog-operator-68c6474976-fbsvl\" (UID: \"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.937529 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3aae1458-34de-4abf-b57e-8d3aefaeb644-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-tvt5b\" (UID: \"3aae1458-34de-4abf-b57e-8d3aefaeb644\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.963307 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlxgs\" (UniqueName: \"kubernetes.io/projected/5edaffb3-e33b-45b9-89fa-39322022da37-kube-api-access-mlxgs\") pod \"downloads-7954f5f757-bmqbd\" (UID: \"5edaffb3-e33b-45b9-89fa-39322022da37\") " pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.969712 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.982788 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.984934 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkpbt\" (UniqueName: \"kubernetes.io/projected/cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e-kube-api-access-dkpbt\") pod \"service-ca-operator-777779d784-98nmz\" (UID: \"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.997038 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" Oct 09 09:06:59 crc kubenswrapper[4710]: I1009 09:06:59.998243 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.001951 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1a0ab392-4bad-4a96-b6cc-14b706777850-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2rs7x\" (UID: \"1a0ab392-4bad-4a96-b6cc-14b706777850\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.006055 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.013694 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.017853 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/df55607e-2c97-4bd5-b3e0-3a748c6482ef-bound-sa-token\") pod \"ingress-operator-5b745b69d9-l57dg\" (UID: \"df55607e-2c97-4bd5-b3e0-3a748c6482ef\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.036763 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9kz2\" (UniqueName: \"kubernetes.io/projected/862286d9-08dd-4330-99ed-04e3b17f2a5b-kube-api-access-l9kz2\") pod \"olm-operator-6b444d44fb-dgsrr\" (UID: \"862286d9-08dd-4330-99ed-04e3b17f2a5b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.054892 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.064307 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wsdrs\" (UniqueName: \"kubernetes.io/projected/fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e-kube-api-access-wsdrs\") pod \"machine-config-controller-84d6567774-klx5p\" (UID: \"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.081077 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.082988 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.086110 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3efa77-9487-4940-a894-e8a10f0c9453-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dssr9\" (UID: \"9c3efa77-9487-4940-a894-e8a10f0c9453\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.095174 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.106563 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.110772 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.118132 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154120 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2de208fc-6343-4e47-a533-8914c1cdd981-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154162 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154180 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-serving-cert\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154197 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvqhj\" (UniqueName: \"kubernetes.io/projected/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-kube-api-access-zvqhj\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154237 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-config-volume\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154261 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154278 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154303 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154322 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/51378278-0202-4be1-96a8-28f4c81a6aae-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154345 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154364 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6n4r\" (UniqueName: \"kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.154496 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.154709 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:00.654697743 +0000 UTC m=+144.144806140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155074 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155098 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4n7n\" (UniqueName: \"kubernetes.io/projected/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-kube-api-access-d4n7n\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155120 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-metrics-tls\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155170 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155189 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155206 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155238 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7wh9\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-kube-api-access-w7wh9\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" 
Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155260 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155275 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqx7t\" (UniqueName: \"kubernetes.io/projected/51378278-0202-4be1-96a8-28f4c81a6aae-kube-api-access-cqx7t\") pod \"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155305 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155319 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de208fc-6343-4e47-a533-8914c1cdd981-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155335 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzgw4\" (UniqueName: \"kubernetes.io/projected/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-kube-api-access-gzgw4\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.155350 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5ddr\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.192464 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-9lzhs"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.217394 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.256118 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258080 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7wh9\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-kube-api-access-w7wh9\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258172 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258203 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqx7t\" (UniqueName: \"kubernetes.io/projected/51378278-0202-4be1-96a8-28f4c81a6aae-kube-api-access-cqx7t\") pod \"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258235 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-registration-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258251 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5j7x\" (UniqueName: \"kubernetes.io/projected/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-kube-api-access-v5j7x\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258306 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258321 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de208fc-6343-4e47-a533-8914c1cdd981-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258348 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzgw4\" (UniqueName: \"kubernetes.io/projected/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-kube-api-access-gzgw4\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258371 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-v5ddr\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258413 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-socket-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258459 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2de208fc-6343-4e47-a533-8914c1cdd981-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258517 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz9ht\" (UniqueName: \"kubernetes.io/projected/052645dc-5ed5-4e24-a940-840efe4862c6-kube-api-access-qz9ht\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258532 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjxpl\" (UniqueName: \"kubernetes.io/projected/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-kube-api-access-mjxpl\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258547 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258561 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-serving-cert\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258576 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvqhj\" (UniqueName: \"kubernetes.io/projected/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-kube-api-access-zvqhj\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258656 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-config-volume\") pod \"dns-default-ckn5c\" (UID: 
\"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258683 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-certs\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258714 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258743 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258801 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258816 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-mountpoint-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258855 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/51378278-0202-4be1-96a8-28f4c81a6aae-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258942 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6n4r\" (UniqueName: \"kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.258964 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 
09:07:00.258997 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259012 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4n7n\" (UniqueName: \"kubernetes.io/projected/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-kube-api-access-d4n7n\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259032 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-metrics-tls\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259104 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-node-bootstrap-token\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259136 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259161 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259176 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052645dc-5ed5-4e24-a940-840efe4862c6-cert\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259233 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259247 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-plugins-dir\") 
pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.259269 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-csi-data-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.259352 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:00.759340196 +0000 UTC m=+144.249448593 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.274263 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.278446 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.278923 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.283520 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.283868 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2de208fc-6343-4e47-a533-8914c1cdd981-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.296239 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/51378278-0202-4be1-96a8-28f4c81a6aae-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.297525 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.306376 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.314590 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-available-featuregates\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.328140 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.330339 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.331076 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.333139 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-metrics-tls\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.334570 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.334720 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.337193 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-config-volume\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.337741 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2de208fc-6343-4e47-a533-8914c1cdd981-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.338078 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.343929 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7wh9\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-kube-api-access-w7wh9\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.344566 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5q44l" event={"ID":"0ee99015-bffc-4ffb-a91c-f941cd33acd7","Type":"ContainerStarted","Data":"429a5b39b22217b58be51cffc574ec318f2dd0786dc7c00312f3de7edd8af5a7"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.344644 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5q44l" event={"ID":"0ee99015-bffc-4ffb-a91c-f941cd33acd7","Type":"ContainerStarted","Data":"3a68aeefe4a54f081c5eeb9d0a96928612b7dff0b07e1f34fa935d4753f56a37"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.345872 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-2k94j"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.345975 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8598s"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.349897 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqx7t\" (UniqueName: \"kubernetes.io/projected/51378278-0202-4be1-96a8-28f4c81a6aae-kube-api-access-cqx7t\") pod \"control-plane-machine-set-operator-78cbb6b69f-fkhwq\" (UID: \"51378278-0202-4be1-96a8-28f4c81a6aae\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.350629 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.357277 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.358301 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.363514 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" event={"ID":"e6867809-73e3-4291-97d4-cb38b0aeae7b","Type":"ContainerStarted","Data":"276b9748dbafbbc6d94ab25ead6692a1e2f096b7760f554f3f7cd7eef2ec3f34"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.367129 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-serving-cert\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.378270 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" event={"ID":"16efdb77-5db5-410b-9f59-aed6293dbcab","Type":"ContainerStarted","Data":"8cc78215b1b7d6060d1af486a42e100f47fd2b1eea54742623d9ce4daea45039"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.378306 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" event={"ID":"16efdb77-5db5-410b-9f59-aed6293dbcab","Type":"ContainerStarted","Data":"25e5e68745caebff8cf0f712ec23a9be645a8ec9d74316874a7eb6c0bc121cdc"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.378352 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4n7n\" (UniqueName: \"kubernetes.io/projected/bd8accf5-de9b-4eeb-9a75-1a90317d72ea-kube-api-access-d4n7n\") pod \"openshift-config-operator-7777fb866f-5bjcn\" (UID: \"bd8accf5-de9b-4eeb-9a75-1a90317d72ea\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.383548 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384113 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384423 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-mountpoint-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384477 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384516 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-node-bootstrap-token\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052645dc-5ed5-4e24-a940-840efe4862c6-cert\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384557 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-plugins-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384571 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-csi-data-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384588 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-registration-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384603 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5j7x\" (UniqueName: \"kubernetes.io/projected/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-kube-api-access-v5j7x\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384637 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-socket-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384656 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz9ht\" (UniqueName: \"kubernetes.io/projected/052645dc-5ed5-4e24-a940-840efe4862c6-kube-api-access-qz9ht\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384668 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjxpl\" (UniqueName: \"kubernetes.io/projected/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-kube-api-access-mjxpl\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384702 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-certs\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384863 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-csi-data-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.384926 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-mountpoint-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.385172 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:00.88516152 +0000 UTC m=+144.375269917 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.387107 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-plugins-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.387187 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-socket-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.387239 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-registration-dir\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.391951 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.392585 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-node-bootstrap-token\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.393657 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6n4r\" (UniqueName: \"kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r\") pod \"collect-profiles-29333340-nq2tx\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.406840 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/052645dc-5ed5-4e24-a940-840efe4862c6-cert\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.418811 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" event={"ID":"58d085ee-1389-48a6-b185-a036265014d2","Type":"ContainerStarted","Data":"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.418847 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" 
event={"ID":"58d085ee-1389-48a6-b185-a036265014d2","Type":"ContainerStarted","Data":"bd701f8b3e808ba28110ed4f1fa1436198312f7d6712a99fa5cd1cb2872f4151"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.419578 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.427342 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvqhj\" (UniqueName: \"kubernetes.io/projected/0d1bd14e-f44d-4db3-bd1e-4e65033b9971-kube-api-access-zvqhj\") pod \"dns-default-ckn5c\" (UID: \"0d1bd14e-f44d-4db3-bd1e-4e65033b9971\") " pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.429819 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-certs\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.434165 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.438167 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzgw4\" (UniqueName: \"kubernetes.io/projected/4a6f9a69-a9b0-4da3-b986-7fa2013592eb-kube-api-access-gzgw4\") pod \"multus-admission-controller-857f4d67dd-qdh9l\" (UID: \"4a6f9a69-a9b0-4da3-b986-7fa2013592eb\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.440345 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" event={"ID":"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a","Type":"ContainerStarted","Data":"239e5770d868e4ac275c16229e995c2f2f58839e970844925c8e0f39d7f17376"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.440367 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" event={"ID":"ae732ae0-1a69-47f0-8895-a4a9ef61ae1a","Type":"ContainerStarted","Data":"9cdd61612277eed23050b13f28353bf2cfac03d72d84d7ca0579982a8ac29172"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.441886 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.442039 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.442514 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2de208fc-6343-4e47-a533-8914c1cdd981-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-tmzpb\" (UID: \"2de208fc-6343-4e47-a533-8914c1cdd981\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.447264 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.447655 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" event={"ID":"53565bc1-575c-4410-aaea-f6016117621f","Type":"ContainerStarted","Data":"ce7abaf9c64ebb4f0ca3a7452695cd50ff05c080d12d7035a6a8249907fe8be7"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.447675 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" event={"ID":"53565bc1-575c-4410-aaea-f6016117621f","Type":"ContainerStarted","Data":"b9bfd04e37f505ed47fbb8bd8af519d6443a0a124d925c109666697e516f1523"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.448949 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" event={"ID":"d682c234-918c-4189-91bb-09e90f9da4db","Type":"ContainerStarted","Data":"90a47197319717d4bde3e5f9e1b34c409f6cb23ab3913ee0aa1bd136695643ff"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.449858 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" event={"ID":"1dcbcf7f-d600-4279-a14f-edf6226c25e2","Type":"ContainerStarted","Data":"73defabbeaa362a46fc34d870d11524d6b9399dcf52a46bdcf84f9fca09d53e0"} Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.454758 4710 patch_prober.go:28] interesting pod/console-operator-58897d9998-2wlbg container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.454792 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" podUID="ae732ae0-1a69-47f0-8895-a4a9ef61ae1a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.13:8443/readyz\": dial tcp 10.217.0.13:8443: connect: connection refused" Oct 09 09:07:00 crc kubenswrapper[4710]: W1009 09:07:00.474916 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2b5de9fd_b25e_4062_9492_71eb94e19a44.slice/crio-4f02b00ba9140df8e061243190cbd3acda480d2e25bc5a5d949bd68605de2a2c WatchSource:0}: Error finding container 4f02b00ba9140df8e061243190cbd3acda480d2e25bc5a5d949bd68605de2a2c: Status 404 returned error can't find the container with id 4f02b00ba9140df8e061243190cbd3acda480d2e25bc5a5d949bd68605de2a2c Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.485123 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.485921 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:00.985908547 +0000 UTC m=+144.476016945 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.490989 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5ddr\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.512052 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5j7x\" (UniqueName: \"kubernetes.io/projected/c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac-kube-api-access-v5j7x\") pod \"csi-hostpathplugin-nnk8n\" (UID: \"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac\") " pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.520049 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz9ht\" (UniqueName: \"kubernetes.io/projected/052645dc-5ed5-4e24-a940-840efe4862c6-kube-api-access-qz9ht\") pod \"ingress-canary-vzjfq\" (UID: \"052645dc-5ed5-4e24-a940-840efe4862c6\") " pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.541123 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjxpl\" (UniqueName: \"kubernetes.io/projected/b7d19303-45b1-4774-b2c7-8fe7c4d3ad83-kube-api-access-mjxpl\") pod \"machine-config-server-wng2k\" (UID: \"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83\") " pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.557384 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.565588 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.586065 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.587389 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.087376473 +0000 UTC m=+144.577484870 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.628957 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.672087 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.688527 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.688814 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.188801328 +0000 UTC m=+144.678909725 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.737283 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-48qbf"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.751784 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vzjfq" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.756804 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-wng2k" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.765476 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bmqbd"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.778674 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.789416 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.789855 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.28984295 +0000 UTC m=+144.779951348 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.891308 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.891632 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.391601933 +0000 UTC m=+144.881710330 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.892092 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.892463 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.392445953 +0000 UTC m=+144.882554350 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.913939 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b"] Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.958568 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd"] Oct 09 09:07:00 crc kubenswrapper[4710]: W1009 09:07:00.974242 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf39a416_5cb7_4d33_b794_09fb70b25f4a.slice/crio-70e270e1912dd8bb69cc77efe29c52d35003b9635380b5c9b4c74f05918c1fe4 WatchSource:0}: Error finding container 70e270e1912dd8bb69cc77efe29c52d35003b9635380b5c9b4c74f05918c1fe4: Status 404 returned error can't find the container with id 70e270e1912dd8bb69cc77efe29c52d35003b9635380b5c9b4c74f05918c1fe4 Oct 09 09:07:00 crc kubenswrapper[4710]: I1009 09:07:00.994753 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:00 crc kubenswrapper[4710]: E1009 09:07:00.995367 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.495351135 +0000 UTC m=+144.985459532 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: W1009 09:07:01.041932 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7af80181_f4a1_4112_8792_87f958d2f22e.slice/crio-c8e73c0db71decce72ed00759b80ce691c5bd2605cb0bcc02b45ffb8fda25cff WatchSource:0}: Error finding container c8e73c0db71decce72ed00759b80ce691c5bd2605cb0bcc02b45ffb8fda25cff: Status 404 returned error can't find the container with id c8e73c0db71decce72ed00759b80ce691c5bd2605cb0bcc02b45ffb8fda25cff Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.096631 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.097509 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.597493771 +0000 UTC m=+145.087602168 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.120499 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.163388 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.164024 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.172229 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.173936 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.203049 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.203229 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.703187323 +0000 UTC m=+145.193295720 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.203279 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.204376 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.704355744 +0000 UTC m=+145.194464141 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.211090 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.223836 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-98nmz"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.255735 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.309044 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.309677 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.809661255 +0000 UTC m=+145.299769652 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.344353 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.392916 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p"] Oct 09 09:07:01 crc kubenswrapper[4710]: W1009 09:07:01.407598 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod862286d9_08dd_4330_99ed_04e3b17f2a5b.slice/crio-9e562017ca0aa191171210192b46c63bc0f4190e76ee316eb932696db7e305f4 WatchSource:0}: Error finding container 9e562017ca0aa191171210192b46c63bc0f4190e76ee316eb932696db7e305f4: Status 404 returned error can't find the container with id 9e562017ca0aa191171210192b46c63bc0f4190e76ee316eb932696db7e305f4 Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.412328 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.412983 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:01.912971259 +0000 UTC m=+145.403079657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: W1009 09:07:01.439123 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf55607e_2c97_4bd5_b3e0_3a748c6482ef.slice/crio-56fb6f7b17170f5c7390c7a1fb5d39c128c94ac135d6849ddfcfe1bf20df95f3 WatchSource:0}: Error finding container 56fb6f7b17170f5c7390c7a1fb5d39c128c94ac135d6849ddfcfe1bf20df95f3: Status 404 returned error can't find the container with id 56fb6f7b17170f5c7390c7a1fb5d39c128c94ac135d6849ddfcfe1bf20df95f3 Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.502779 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.514058 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.515679 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.015627332 +0000 UTC m=+145.505735730 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.518381 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.519534 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.019520583 +0000 UTC m=+145.509628981 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.531622 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.556350 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" event={"ID":"21d2e430-51d2-41b4-89e6-4af6eceaf5d3","Type":"ContainerStarted","Data":"67f7f6a65f2d14cd8e3b71a1fa533aef07118526a3bb39710c0e4a24a17c00ec"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.570687 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8598s" event={"ID":"dd0a0a80-0cd2-4efe-b988-8f06409dbfac","Type":"ContainerStarted","Data":"4d6a462bd27060171687335884a8188cfe3ff20e66c7239d80d2b44c595decc4"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.620245 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.620466 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.120417824 +0000 UTC m=+145.610526222 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.620706 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.621193 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.121175111 +0000 UTC m=+145.611283508 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.649975 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" podStartSLOduration=124.649951787 podStartE2EDuration="2m4.649951787s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:01.601400866 +0000 UTC m=+145.091509262" watchObservedRunningTime="2025-10-09 09:07:01.649951787 +0000 UTC m=+145.140060185" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.686893 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" event={"ID":"3aae1458-34de-4abf-b57e-8d3aefaeb644","Type":"ContainerStarted","Data":"7db095b06bf56f4c99814697439a26d75eb1976ac1a64fcf5af5c302547a151c"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.702277 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-5q44l" podStartSLOduration=124.702257809 podStartE2EDuration="2m4.702257809s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:01.675921769 +0000 UTC m=+145.166030167" watchObservedRunningTime="2025-10-09 09:07:01.702257809 +0000 UTC m=+145.192366207" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.708589 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.712758 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" event={"ID":"6fc18397-5fbe-41c5-a28a-eb550cc22dcd","Type":"ContainerStarted","Data":"8bdba90cec420b57a6c50a95c16592e260c9480a49136a75734a34ecf4e24b52"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.723284 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.723820 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.223804593 +0000 UTC m=+145.713912990 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.745590 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" event={"ID":"df55607e-2c97-4bd5-b3e0-3a748c6482ef","Type":"ContainerStarted","Data":"56fb6f7b17170f5c7390c7a1fb5d39c128c94ac135d6849ddfcfe1bf20df95f3"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.750556 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" event={"ID":"4e0e1068-6e20-498c-a8b0-c61513824a86","Type":"ContainerStarted","Data":"a09117f3136b899932a9d6196e33a0deae8e3b75b15b362662e5ddd039d6f8ea"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.750871 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" event={"ID":"4e0e1068-6e20-498c-a8b0-c61513824a86","Type":"ContainerStarted","Data":"b2fce0df8296c1c95a2b974a2425a00495205979011c8f50bd718e6a21de386b"} Oct 09 09:07:01 crc kubenswrapper[4710]: W1009 09:07:01.760652 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51378278_0202_4be1_96a8_28f4c81a6aae.slice/crio-43fa1e38bb47e20b38a04067276114fdb94242ba5d7f559d6aae56f3ce249059 WatchSource:0}: Error finding container 43fa1e38bb47e20b38a04067276114fdb94242ba5d7f559d6aae56f3ce249059: Status 404 returned error can't find the container with id 43fa1e38bb47e20b38a04067276114fdb94242ba5d7f559d6aae56f3ce249059 Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.789181 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ckn5c"] Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.798577 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" event={"ID":"4f054e02-d261-4d4e-9333-b7c469374c24","Type":"ContainerStarted","Data":"e235f333146ddb286b54e203d702eef9421dd96d37f322ae3924a212b69683af"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.827325 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.831514 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" event={"ID":"862286d9-08dd-4330-99ed-04e3b17f2a5b","Type":"ContainerStarted","Data":"9e562017ca0aa191171210192b46c63bc0f4190e76ee316eb932696db7e305f4"} Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.832118 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-09 09:07:02.332084415 +0000 UTC m=+145.822192832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.833778 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" podStartSLOduration=123.833764669 podStartE2EDuration="2m3.833764669s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:01.804897883 +0000 UTC m=+145.295006300" watchObservedRunningTime="2025-10-09 09:07:01.833764669 +0000 UTC m=+145.323873067" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.845914 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qp498" event={"ID":"6e022686-4480-4610-9760-e1487bb99265","Type":"ContainerStarted","Data":"654bc18f2b2e939a4252d741f95c6878bd010249df44ede844e5ca220e577562"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.845951 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-qp498" event={"ID":"6e022686-4480-4610-9760-e1487bb99265","Type":"ContainerStarted","Data":"cb26b4e1a9af3ccf0f1204bcd628739185b4d74c1208b8657ec533a2073f0405"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.855031 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" event={"ID":"16efdb77-5db5-410b-9f59-aed6293dbcab","Type":"ContainerStarted","Data":"3e4ddd39619dac7c07d054ce3a565991cbf08b19f92adae2c60ebd7633c58b28"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.859059 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" event={"ID":"d3f821a1-ba36-423a-b20b-82dc307a8c22","Type":"ContainerStarted","Data":"d3213375e935475b0de06dc957ccb62653e51083e5baf214d0648287f6c20451"} Oct 09 09:07:01 crc kubenswrapper[4710]: W1009 09:07:01.865475 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2de208fc_6343_4e47_a533_8914c1cdd981.slice/crio-2a86687a35ac76af483187779b851a173ef038249783081231d1fbbdf6a756f5 WatchSource:0}: Error finding container 2a86687a35ac76af483187779b851a173ef038249783081231d1fbbdf6a756f5: Status 404 returned error can't find the container with id 2a86687a35ac76af483187779b851a173ef038249783081231d1fbbdf6a756f5 Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.876490 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" event={"ID":"d4f9a31b-650d-46d5-b8d3-9176e2048beb","Type":"ContainerStarted","Data":"e6d43b453337060fdaf8d2cca67fdafba70a12ce88a71daead7da94e3c789dae"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.909276 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" event={"ID":"c3193888-6214-44cb-a0bc-0091046b80c2","Type":"ContainerStarted","Data":"a146b7f0000dd8faa062d3beaec0f79d49e4aa6a78f2c382a0ec2eb3b8758fba"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.909318 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" event={"ID":"c3193888-6214-44cb-a0bc-0091046b80c2","Type":"ContainerStarted","Data":"fa33793ca38bd5f9ff0a16f06bb9435cb40083acd0dc8294c53afaedbdebeb7f"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.921140 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-7h4pp" podStartSLOduration=124.921128747 podStartE2EDuration="2m4.921128747s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:01.919262313 +0000 UTC m=+145.409370710" watchObservedRunningTime="2025-10-09 09:07:01.921128747 +0000 UTC m=+145.411237144" Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.924493 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" event={"ID":"6907d5b6-8950-45a6-bac5-2bc61c0d8427","Type":"ContainerStarted","Data":"47cb8f6548a806ef4d6df55974739de65b026d45da91431f728e2cf26764310f"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.927922 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:01 crc kubenswrapper[4710]: E1009 09:07:01.931182 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.431166087 +0000 UTC m=+145.921274485 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.949159 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" event={"ID":"2b5de9fd-b25e-4062-9492-71eb94e19a44","Type":"ContainerStarted","Data":"4f02b00ba9140df8e061243190cbd3acda480d2e25bc5a5d949bd68605de2a2c"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.982008 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" event={"ID":"7af80181-f4a1-4112-8792-87f958d2f22e","Type":"ContainerStarted","Data":"c8e73c0db71decce72ed00759b80ce691c5bd2605cb0bcc02b45ffb8fda25cff"} Oct 09 09:07:01 crc kubenswrapper[4710]: I1009 09:07:01.987899 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bmqbd" event={"ID":"5edaffb3-e33b-45b9-89fa-39322022da37","Type":"ContainerStarted","Data":"8b661aaae86a1a452bcbf8b11aa794b5ed5f8b414c453035fc5b0a0511237e46"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.016720 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.030951 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.032239 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.532224662 +0000 UTC m=+146.022333049 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.035821 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn"] Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.036587 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:02 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:02 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:02 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.036793 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.038508 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-nnk8n"] Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.050704 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" event={"ID":"d682c234-918c-4189-91bb-09e90f9da4db","Type":"ContainerStarted","Data":"0770f1969d38f07c6433cfca414606d346cddd1133c42ff4d8dc9de38239428d"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.065110 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" event={"ID":"23aab9d2-bc11-496c-879b-14b3fe7d7dd7","Type":"ContainerStarted","Data":"f8255a92145a5389d824f0072ee458b173a60c1dca67258310920a7907fc40f6"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.092043 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" event={"ID":"e6867809-73e3-4291-97d4-cb38b0aeae7b","Type":"ContainerStarted","Data":"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.092853 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.125170 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" event={"ID":"1dcbcf7f-d600-4279-a14f-edf6226c25e2","Type":"ContainerStarted","Data":"6388f1ce664947ae66ce1471127afb1457170d8e87ca314b5305efde24533d3a"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.125675 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.128969 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-multus/multus-admission-controller-857f4d67dd-qdh9l"] Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.135889 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" event={"ID":"778aaf63-ea06-45a1-b031-efb3809bf0ec","Type":"ContainerStarted","Data":"33da91a20216eb7ef9cbfab4c3bacfd32014d7f734a28c4d8d19d7442d66ecd4"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.146913 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.147829 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.647813107 +0000 UTC m=+146.137921504 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.209314 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-dm7lm" podStartSLOduration=125.209295409 podStartE2EDuration="2m5.209295409s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.167898077 +0000 UTC m=+145.658006474" watchObservedRunningTime="2025-10-09 09:07:02.209295409 +0000 UTC m=+145.699403806" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.209514 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" podStartSLOduration=124.209510684 podStartE2EDuration="2m4.209510684s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.208853767 +0000 UTC m=+145.698962164" watchObservedRunningTime="2025-10-09 09:07:02.209510684 +0000 UTC m=+145.699619081" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.209688 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" event={"ID":"cf39a416-5cb7-4d33-b794-09fb70b25f4a","Type":"ContainerStarted","Data":"70e270e1912dd8bb69cc77efe29c52d35003b9635380b5c9b4c74f05918c1fe4"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.232319 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vzjfq"] Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.236270 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wt2d2" podStartSLOduration=125.236257298 podStartE2EDuration="2m5.236257298s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.235922198 +0000 UTC m=+145.726030595" watchObservedRunningTime="2025-10-09 09:07:02.236257298 +0000 UTC m=+145.726365695" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.252683 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" event={"ID":"9c3efa77-9487-4940-a894-e8a10f0c9453","Type":"ContainerStarted","Data":"10bc24f2a2e215e4e0a7eb4a4246a8130c066bf5ea8edf5f351fcbcfa45f774a"} Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.253679 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.254125 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.75411232 +0000 UTC m=+146.244220717 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.271100 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2wlbg" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.313100 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-qp498" podStartSLOduration=124.313082311 podStartE2EDuration="2m4.313082311s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.283638378 +0000 UTC m=+145.773746775" watchObservedRunningTime="2025-10-09 09:07:02.313082311 +0000 UTC m=+145.803190708" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.354684 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" podStartSLOduration=124.354669 podStartE2EDuration="2m4.354669s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.312988836 +0000 UTC m=+145.803097232" watchObservedRunningTime="2025-10-09 09:07:02.354669 +0000 UTC m=+145.844777396" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.355810 4710 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.855796493 +0000 UTC m=+146.345904890 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.355825 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.356300 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.358768 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.85874792 +0000 UTC m=+146.348856307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.446374 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-xc46h" podStartSLOduration=124.446358703 podStartE2EDuration="2m4.446358703s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:02.445176115 +0000 UTC m=+145.935284513" watchObservedRunningTime="2025-10-09 09:07:02.446358703 +0000 UTC m=+145.936467100" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.466932 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.467215 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:02.96718019 +0000 UTC m=+146.457288587 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.571224 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.571646 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.071630561 +0000 UTC m=+146.561738958 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.673737 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.673946 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.674000 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.674050 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.674103 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.677867 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.177848461 +0000 UTC m=+146.667956858 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.678491 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.696463 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.697282 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.697810 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.724671 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.729303 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.734391 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.775517 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.775795 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-09 09:07:03.275779105 +0000 UTC m=+146.765887493 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.876655 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.876806 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.376787156 +0000 UTC m=+146.866895552 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.877134 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.877475 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.37744835 +0000 UTC m=+146.867556747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:02 crc kubenswrapper[4710]: I1009 09:07:02.983501 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:02 crc kubenswrapper[4710]: E1009 09:07:02.983883 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.483868512 +0000 UTC m=+146.973976908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.031455 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:03 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:03 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:03 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.031499 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.086144 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.086705 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.586694764 +0000 UTC m=+147.076803161 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.189853 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.198450 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.698411619 +0000 UTC m=+147.188520015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.296238 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.296506 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.796495101 +0000 UTC m=+147.286603498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.345941 4710 generic.go:334] "Generic (PLEG): container finished" podID="dd0a0a80-0cd2-4efe-b988-8f06409dbfac" containerID="fa0db33275d91742720dc9554507cd618ed305244b77bcde657f6b33da57f703" exitCode=0 Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.346290 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8598s" event={"ID":"dd0a0a80-0cd2-4efe-b988-8f06409dbfac","Type":"ContainerDied","Data":"fa0db33275d91742720dc9554507cd618ed305244b77bcde657f6b33da57f703"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.354778 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" event={"ID":"2f367bf0-fb0c-4884-b874-d4a426a8ff5a","Type":"ContainerStarted","Data":"57693cd1cf50852dac8ec9a559e30eb932fbf87ee2f6b483640c94699816c4b5"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.354809 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" event={"ID":"2f367bf0-fb0c-4884-b874-d4a426a8ff5a","Type":"ContainerStarted","Data":"3fc19909af0c1beab96444d575b964e8bba7b415e3a00a60b7e48dc440eb16e0"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.396000 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" event={"ID":"4a6f9a69-a9b0-4da3-b986-7fa2013592eb","Type":"ContainerStarted","Data":"35e90765ef05819dacb638ff9151774c2aa303988d4754cd391c6620b4d22428"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.399404 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.400151 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:03.900138944 +0000 UTC m=+147.390247342 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.411531 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" podStartSLOduration=126.411520877 podStartE2EDuration="2m6.411520877s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.409682354 +0000 UTC m=+146.899790741" watchObservedRunningTime="2025-10-09 09:07:03.411520877 +0000 UTC m=+146.901629274" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.434975 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" event={"ID":"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0","Type":"ContainerStarted","Data":"55954e9bc620ec53f609ee1c5a5ee6a8de83a1e967681be51099c2e7b6b2ff16"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.475183 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" event={"ID":"1a0ab392-4bad-4a96-b6cc-14b706777850","Type":"ContainerStarted","Data":"8fd8b44296fdea7e4fde914aacadc0dec7d03de081753a83bfd23375bd044131"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.475243 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" event={"ID":"1a0ab392-4bad-4a96-b6cc-14b706777850","Type":"ContainerStarted","Data":"32afc428ef44cc1b76d84d721903bc27025454c37540182e907c0ccdf1141498"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.501459 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.502229 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.002218601 +0000 UTC m=+147.492326999 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.509796 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-wng2k" event={"ID":"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83","Type":"ContainerStarted","Data":"51e165ccc6cd145a356ceafbd23fa818cf5bd75b0c83182b386eebf86047087b"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.525737 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2rs7x" podStartSLOduration=125.52571224 podStartE2EDuration="2m5.52571224s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.524658877 +0000 UTC m=+147.014767264" watchObservedRunningTime="2025-10-09 09:07:03.52571224 +0000 UTC m=+147.015820637" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.563537 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.564221 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vzjfq" event={"ID":"052645dc-5ed5-4e24-a940-840efe4862c6","Type":"ContainerStarted","Data":"1ab63d563ab42bf2d0fcae8e1258b1abf39fdc29b26753bdfcbc52a636d6ca2f"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.592998 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" event={"ID":"bd8accf5-de9b-4eeb-9a75-1a90317d72ea","Type":"ContainerStarted","Data":"09b8f3a7f801cce9dce58aee6d681b1c1f48fcef455e879b8ff88bf06011efa6"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.605290 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.606149 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.1061364 +0000 UTC m=+147.596244798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.608358 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-9lzhs" event={"ID":"4f054e02-d261-4d4e-9333-b7c469374c24","Type":"ContainerStarted","Data":"58dd9c49d1f19a172535067820d399f79fe65aedf4d313473bb1e5b09ca7e2b2"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.707064 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.710171 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.210161381 +0000 UTC m=+147.700269778 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.716979 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vzjfq" podStartSLOduration=6.716964321 podStartE2EDuration="6.716964321s" podCreationTimestamp="2025-10-09 09:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.715481849 +0000 UTC m=+147.205590246" watchObservedRunningTime="2025-10-09 09:07:03.716964321 +0000 UTC m=+147.207072718" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.718064 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" event={"ID":"2b5de9fd-b25e-4062-9492-71eb94e19a44","Type":"ContainerStarted","Data":"c21f693b49ff620d774891c117f866e0e5885aad9b07bbcafd3e944c4deb5b10"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.718705 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.724607 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" event={"ID":"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e","Type":"ContainerStarted","Data":"15d4d219f5e7ef1d5da6227da4a547f092dccb9de7492767ff28c1ae234b6ee8"} Oct 09 09:07:03 crc 
kubenswrapper[4710]: I1009 09:07:03.726752 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" event={"ID":"d682c234-918c-4189-91bb-09e90f9da4db","Type":"ContainerStarted","Data":"856ec58df10c48f82c822702ecfa10fc72482e5a71366cb83cdf37676639bf77"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.734996 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" event={"ID":"51378278-0202-4be1-96a8-28f4c81a6aae","Type":"ContainerStarted","Data":"43fa1e38bb47e20b38a04067276114fdb94242ba5d7f559d6aae56f3ce249059"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.776157 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bmqbd" event={"ID":"5edaffb3-e33b-45b9-89fa-39322022da37","Type":"ContainerStarted","Data":"353c3903f6b15b91a90c065495709c90a3f35830a57a3c1273f5bd844ec415f7"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.777053 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.788704 4710 patch_prober.go:28] interesting pod/downloads-7954f5f757-bmqbd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.789013 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bmqbd" podUID="5edaffb3-e33b-45b9-89fa-39322022da37" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.789014 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" event={"ID":"cf39a416-5cb7-4d33-b794-09fb70b25f4a","Type":"ContainerStarted","Data":"76729301a325f6db16d965def6129ced5fe48c19b80519e129367d564eb91e81"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.791083 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" event={"ID":"9c3efa77-9487-4940-a894-e8a10f0c9453","Type":"ContainerStarted","Data":"d5c4b4ca564b8b1317d4dbd0da9ff31818350c6e1a83afa0274f0d20d1759781"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.807990 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.808902 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.308883587 +0000 UTC m=+147.798991983 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.841024 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" event={"ID":"7af80181-f4a1-4112-8792-87f958d2f22e","Type":"ContainerStarted","Data":"8c85b87eb3f69c8c624537869ab1caf83cc2cc7eeaaa65191a94f6f5a4d3388d"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.850615 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" podStartSLOduration=125.850599097 podStartE2EDuration="2m5.850599097s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.848791443 +0000 UTC m=+147.338899840" watchObservedRunningTime="2025-10-09 09:07:03.850599097 +0000 UTC m=+147.340707493" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.864733 4710 generic.go:334] "Generic (PLEG): container finished" podID="23aab9d2-bc11-496c-879b-14b3fe7d7dd7" containerID="e2ab2d94a0de3693a7a1c06026e58e6a73cbccff0b729acc587286b10889b077" exitCode=0 Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.864803 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" event={"ID":"23aab9d2-bc11-496c-879b-14b3fe7d7dd7","Type":"ContainerDied","Data":"e2ab2d94a0de3693a7a1c06026e58e6a73cbccff0b729acc587286b10889b077"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.892701 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" event={"ID":"df55607e-2c97-4bd5-b3e0-3a748c6482ef","Type":"ContainerStarted","Data":"9bae8a9ad560cf55ef6d6e218aec891eb20a6869ddab358f4683bdd10b72080c"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.917270 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:03 crc kubenswrapper[4710]: E1009 09:07:03.922004 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.421991169 +0000 UTC m=+147.912099566 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.931527 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" podStartSLOduration=125.931501818 podStartE2EDuration="2m5.931501818s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.927503088 +0000 UTC m=+147.417611485" watchObservedRunningTime="2025-10-09 09:07:03.931501818 +0000 UTC m=+147.421610215" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.935833 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" event={"ID":"21d2e430-51d2-41b4-89e6-4af6eceaf5d3","Type":"ContainerStarted","Data":"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.937558 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.954919 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" event={"ID":"d4f9a31b-650d-46d5-b8d3-9176e2048beb","Type":"ContainerStarted","Data":"f0c56a7faa368cd4a10cb5973739a67a465e06b0ed25059fec2f0c6e927826ef"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.955773 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.956635 4710 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7cjtd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" start-of-body= Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.956663 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.24:8080/healthz\": dial tcp 10.217.0.24:8080: connect: connection refused" Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.976720 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" event={"ID":"862286d9-08dd-4330-99ed-04e3b17f2a5b","Type":"ContainerStarted","Data":"5b70a97ace824f213c95618d7f400f4052cc7fb8d20682bbae592b67a287549a"} Oct 09 09:07:03 crc kubenswrapper[4710]: I1009 09:07:03.977655 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.000521 4710 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" podStartSLOduration=126.000492559 podStartE2EDuration="2m6.000492559s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:03.967700771 +0000 UTC m=+147.457809169" watchObservedRunningTime="2025-10-09 09:07:04.000492559 +0000 UTC m=+147.490600956" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.001738 4710 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-dgsrr container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.006194 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" podUID="862286d9-08dd-4330-99ed-04e3b17f2a5b" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.018256 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.019552 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.51953183 +0000 UTC m=+148.009640227 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.020736 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" event={"ID":"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac","Type":"ContainerStarted","Data":"02467c1a5dfb41afb5ca3bc234c09d8efd6657e9bb92ac891433dc8b1f3a7b8f"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.047594 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" event={"ID":"6fc18397-5fbe-41c5-a28a-eb550cc22dcd","Type":"ContainerStarted","Data":"fca0380586167d4efab07121c12711cec5609ff525912a811553ee8b843c6d83"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.079588 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:04 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:04 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:04 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.079932 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.106226 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" event={"ID":"c3193888-6214-44cb-a0bc-0091046b80c2","Type":"ContainerStarted","Data":"35e695632b28d5369c0e9f8e2273d5c6fc90a541c9056f26c0767091a2b025eb"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.109068 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" podStartSLOduration=126.109052649 podStartE2EDuration="2m6.109052649s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.107074334 +0000 UTC m=+147.597182731" watchObservedRunningTime="2025-10-09 09:07:04.109052649 +0000 UTC m=+147.599161045" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.111610 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-69jvv" podStartSLOduration=126.111602119 podStartE2EDuration="2m6.111602119s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.046714054 +0000 UTC m=+147.536822451" watchObservedRunningTime="2025-10-09 09:07:04.111602119 +0000 UTC m=+147.601710516" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.130263 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.131319 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.631305552 +0000 UTC m=+148.121413948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.133402 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckn5c" event={"ID":"0d1bd14e-f44d-4db3-bd1e-4e65033b9971","Type":"ContainerStarted","Data":"f38f298bb6bf7488017769faf3519687f714d005db90bc05acae8dc7ac824bd2"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.191196 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" event={"ID":"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e","Type":"ContainerStarted","Data":"a323b4155159fc1d462db0d0cb2454f51dfaaff35c992803f2e1d859d21ab6ff"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.228812 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" podStartSLOduration=127.228795546 podStartE2EDuration="2m7.228795546s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.163834223 +0000 UTC m=+147.653942620" watchObservedRunningTime="2025-10-09 09:07:04.228795546 +0000 UTC m=+147.718903943" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.232586 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.233746 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.733730599 +0000 UTC m=+148.223838997 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.281949 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" event={"ID":"d3f821a1-ba36-423a-b20b-82dc307a8c22","Type":"ContainerStarted","Data":"36c236f87643f7680be20456a8ab8e2a7a62b0f037866f7ed492b97137c189a1"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.302930 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" event={"ID":"2de208fc-6343-4e47-a533-8914c1cdd981","Type":"ContainerStarted","Data":"2a86687a35ac76af483187779b851a173ef038249783081231d1fbbdf6a756f5"} Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.327077 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" podStartSLOduration=126.327062985 podStartE2EDuration="2m6.327062985s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.23189909 +0000 UTC m=+147.722007487" watchObservedRunningTime="2025-10-09 09:07:04.327062985 +0000 UTC m=+147.817171382" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.338887 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.339937 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.839923351 +0000 UTC m=+148.330031748 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.384944 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-bmqbd" podStartSLOduration=127.384921634 podStartE2EDuration="2m7.384921634s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.327649189 +0000 UTC m=+147.817757586" watchObservedRunningTime="2025-10-09 09:07:04.384921634 +0000 UTC m=+147.875030030" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.385164 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dssr9" podStartSLOduration=126.385156717 podStartE2EDuration="2m6.385156717s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.38369834 +0000 UTC m=+147.873806737" watchObservedRunningTime="2025-10-09 09:07:04.385156717 +0000 UTC m=+147.875265113" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.443035 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.444371 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:04.944354456 +0000 UTC m=+148.434462853 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.540042 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-48qbf" podStartSLOduration=126.540028222 podStartE2EDuration="2m6.540028222s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.538949881 +0000 UTC m=+148.029058278" watchObservedRunningTime="2025-10-09 09:07:04.540028222 +0000 UTC m=+148.030136619" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.550061 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.550385 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.050372891 +0000 UTC m=+148.540481288 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.652970 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.653458 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.153442272 +0000 UTC m=+148.643550670 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.667028 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" podStartSLOduration=126.667017304 podStartE2EDuration="2m6.667017304s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.624780661 +0000 UTC m=+148.114889059" watchObservedRunningTime="2025-10-09 09:07:04.667017304 +0000 UTC m=+148.157125700" Oct 09 09:07:04 crc kubenswrapper[4710]: W1009 09:07:04.695560 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-7147708d4f8c566cf95c285da72134990306306a63ddedc7960bfce908d38224 WatchSource:0}: Error finding container 7147708d4f8c566cf95c285da72134990306306a63ddedc7960bfce908d38224: Status 404 returned error can't find the container with id 7147708d4f8c566cf95c285da72134990306306a63ddedc7960bfce908d38224 Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.753875 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.760708 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.26069459 +0000 UTC m=+148.750802987 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.808029 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" podStartSLOduration=127.808006659 podStartE2EDuration="2m7.808006659s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.803243008 +0000 UTC m=+148.293351406" watchObservedRunningTime="2025-10-09 09:07:04.808006659 +0000 UTC m=+148.298115056" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.854808 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.855214 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.355189625 +0000 UTC m=+148.845298022 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.938542 4710 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-7d642 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.938761 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 09 09:07:04 crc kubenswrapper[4710]: I1009 09:07:04.958609 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:04 crc kubenswrapper[4710]: E1009 09:07:04.959077 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.459060815 +0000 UTC m=+148.949169213 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.017917 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:05 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:05 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:05 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.017977 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.051993 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-f4bff" podStartSLOduration=127.051974373 podStartE2EDuration="2m7.051974373s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:04.910468796 +0000 UTC m=+148.400577193" watchObservedRunningTime="2025-10-09 09:07:05.051974373 +0000 UTC m=+148.542082771" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.053527 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" podStartSLOduration=127.053516688 podStartE2EDuration="2m7.053516688s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.051532632 +0000 UTC m=+148.541641028" watchObservedRunningTime="2025-10-09 09:07:05.053516688 +0000 UTC m=+148.543625084" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.059663 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.059826 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.559805059 +0000 UTC m=+149.049913456 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.060069 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.060390 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.560375994 +0000 UTC m=+149.050484391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.161742 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.161939 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.661917007 +0000 UTC m=+149.152025403 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.162026 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.162364 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.662341697 +0000 UTC m=+149.152450094 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.263045 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.263222 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.763193913 +0000 UTC m=+149.253302309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.263644 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.263982 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.763973821 +0000 UTC m=+149.254082218 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.294714 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-2k94j" podStartSLOduration=127.294687917 podStartE2EDuration="2m7.294687917s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.190776229 +0000 UTC m=+148.680884626" watchObservedRunningTime="2025-10-09 09:07:05.294687917 +0000 UTC m=+148.784796314" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.344662 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" event={"ID":"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac","Type":"ContainerStarted","Data":"cabe445a20a0182aca1696fd8fce3ba3c5cc2cbeb0ce693ef37d468ff54e9ac6"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.346489 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" event={"ID":"2de208fc-6343-4e47-a533-8914c1cdd981","Type":"ContainerStarted","Data":"72468e62ee6e03bb882cf32c281e40aea9bc5d504e377225fb0fc41c9a916ef3"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.352891 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4zp57" event={"ID":"6fc18397-5fbe-41c5-a28a-eb550cc22dcd","Type":"ContainerStarted","Data":"15194827b4aab2589ee65d62d275341c5f75dc85800422903a360602ef53cc45"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.366908 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.367193 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.867176764 +0000 UTC m=+149.357285161 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.372915 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" event={"ID":"6907d5b6-8950-45a6-bac5-2bc61c0d8427","Type":"ContainerStarted","Data":"d2942fc683d82d503ce765eaef8c0f47e64e6b04d36b1fb783c8a38dc7e542c3"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.373738 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.387819 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-fkhwq" event={"ID":"51378278-0202-4be1-96a8-28f4c81a6aae","Type":"ContainerStarted","Data":"4db2a099c996f5804512e8cc05faf856a4883ab4f325012a2e13b5e0d5bc23c0"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.416956 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" event={"ID":"23aab9d2-bc11-496c-879b-14b3fe7d7dd7","Type":"ContainerStarted","Data":"3691a293bad95196244ad62a2d7714dfccd8ce89acf8109cdc0333dba363e04f"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.433761 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" podStartSLOduration=127.433737388 podStartE2EDuration="2m7.433737388s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.432583305 +0000 UTC m=+148.922691702" watchObservedRunningTime="2025-10-09 09:07:05.433737388 +0000 UTC m=+148.923845786" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.434875 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-tmzpb" podStartSLOduration=127.434866755 podStartE2EDuration="2m7.434866755s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.330988151 +0000 UTC m=+148.821096548" watchObservedRunningTime="2025-10-09 09:07:05.434866755 +0000 UTC m=+148.924975152" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.436100 4710 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vzjfq" event={"ID":"052645dc-5ed5-4e24-a940-840efe4862c6","Type":"ContainerStarted","Data":"14e856156e38a2d3efcaa1172505a81965380a99cdd3ac11c96f37528761c2a4"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.449914 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" event={"ID":"778aaf63-ea06-45a1-b031-efb3809bf0ec","Type":"ContainerStarted","Data":"58e45bede30ca7cd1049460b63d73cb90beefbdca42307f27f479572a40992b8"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.449944 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" event={"ID":"778aaf63-ea06-45a1-b031-efb3809bf0ec","Type":"ContainerStarted","Data":"112de394ab3e90ad56474f94c50c3f90c3d9321c87bcb5e5a7ed4e5c268b2676"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.459994 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"3e0febee8e4d53110a4165114a5214abbd4f062d85bc4d654572b02fba3e7675"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.460050 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7147708d4f8c566cf95c285da72134990306306a63ddedc7960bfce908d38224"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.465066 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e21419ce57c345faaba9a1829915f6a5b8ea1744d94043f54325d5214cfeaf99"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.465108 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"520fca52cb1ebd0a27ceaa63811b882935fc10424bd66d5a6fefa54a028b01cb"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.468044 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.470234 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:05.97021712 +0000 UTC m=+149.460325517 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.472159 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" event={"ID":"2b5de9fd-b25e-4062-9492-71eb94e19a44","Type":"ContainerStarted","Data":"68e0a207fcec275cb5e7ef7fcda44074ec8379dec1d29f153cf148e17fd25827"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.478582 4710 generic.go:334] "Generic (PLEG): container finished" podID="bd8accf5-de9b-4eeb-9a75-1a90317d72ea" containerID="ff18965f25baa3801b143538e5c62b5b38b62558c041388a6f08d273ad59257e" exitCode=0 Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.478652 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" event={"ID":"bd8accf5-de9b-4eeb-9a75-1a90317d72ea","Type":"ContainerDied","Data":"ff18965f25baa3801b143538e5c62b5b38b62558c041388a6f08d273ad59257e"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.486455 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-wng2k" event={"ID":"b7d19303-45b1-4774-b2c7-8fe7c4d3ad83","Type":"ContainerStarted","Data":"b7d4ff1dc3641cb06f7964d0204dacb8def0302035945f09ab97d3859604bc04"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.495965 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" event={"ID":"4d30e7c6-48d6-49dd-b6cc-f983d70eecd0","Type":"ContainerStarted","Data":"8ff5df5ea06f8ee82fb4d70bd5d643219d0cd17182da1cd488371a6880348902"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.496724 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.504755 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" event={"ID":"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e","Type":"ContainerStarted","Data":"348de8318525a546dd8cfbc2c05ca30534c2ca49040de6c66ef75785d7c27d7c"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.504800 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-klx5p" event={"ID":"fc2e287d-2f35-40d9-ac9a-e8a67e3ea01e","Type":"ContainerStarted","Data":"ed6cc2ee1cc2187624d3c4fced12e2c0fe2a429d08bdd37fd286462f4917ddfb"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.518658 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"979d64bf3d8d75b2d959b35b888cf76d743af57184620eb66d2dc0e42e09a197"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.518785 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"20a6ac6c72047ca70852248b394f78d7f55a62fbb4a43947a20bd4c77547854b"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.519057 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.536751 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" event={"ID":"3aae1458-34de-4abf-b57e-8d3aefaeb644","Type":"ContainerStarted","Data":"82444fc9eded26f290256735e8522179e503aa75431644537a83766024d7eb40"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.542123 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.543423 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8598s" event={"ID":"dd0a0a80-0cd2-4efe-b988-8f06409dbfac","Type":"ContainerStarted","Data":"7c07691f73c813ea800a22b53264aa39afe349d20083ad813800b0ff06e2b507"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.543462 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8598s" event={"ID":"dd0a0a80-0cd2-4efe-b988-8f06409dbfac","Type":"ContainerStarted","Data":"1416291ea317ca3c6650c6ec763afaf80c259111cc9ea44a484048756da2a10a"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.545344 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" event={"ID":"4a6f9a69-a9b0-4da3-b986-7fa2013592eb","Type":"ContainerStarted","Data":"75a1b6119a08e9fdb45a83504c6e00601051ff13804d20c3e8b3d074ccab1021"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.545361 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" event={"ID":"4a6f9a69-a9b0-4da3-b986-7fa2013592eb","Type":"ContainerStarted","Data":"b5f7920682917e465a20823a76f51cfa9a5c7a25117ff40b1b0f4872d0de8c65"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.545728 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.545765 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.555540 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2zqzn" event={"ID":"cf39a416-5cb7-4d33-b794-09fb70b25f4a","Type":"ContainerStarted","Data":"f553ec300c7715bf60b9cbdce88c5033c63103080d506b34e1b479feae300c63"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.564665 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckn5c" 
event={"ID":"0d1bd14e-f44d-4db3-bd1e-4e65033b9971","Type":"ContainerStarted","Data":"89b9c80047f94e9e7939dd66779a4414ab24875fcb29dc05614d57ae43445219"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.564939 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ckn5c" event={"ID":"0d1bd14e-f44d-4db3-bd1e-4e65033b9971","Type":"ContainerStarted","Data":"b9ecffce1d9ca275df99de9d35ca38d71af9f54a44f4f370c6f93a74aadb30cd"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.565502 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.569018 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.569969 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.069953335 +0000 UTC m=+149.560061732 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.576600 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-98nmz" event={"ID":"cbf04db7-2c4c-4e47-90ca-ba4d4faa1f0e","Type":"ContainerStarted","Data":"590415b461ab017176e4260d7b98ebf6bf3d667532b8a3a93729f0e3dfab1890"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.597093 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" event={"ID":"df55607e-2c97-4bd5-b3e0-3a748c6482ef","Type":"ContainerStarted","Data":"ccc15da88a5bb5009797146a5bb91239164b5a2717938498d424921dcd0dcc28"} Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.597601 4710 patch_prober.go:28] interesting pod/downloads-7954f5f757-bmqbd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.597713 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bmqbd" podUID="5edaffb3-e33b-45b9-89fa-39322022da37" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.605087 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.625252 4710 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" podStartSLOduration=127.625239179 podStartE2EDuration="2m7.625239179s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.55902166 +0000 UTC m=+149.049130058" watchObservedRunningTime="2025-10-09 09:07:05.625239179 +0000 UTC m=+149.115347576" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.674250 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.674553 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.174540063 +0000 UTC m=+149.664648461 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.694627 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-qdh9l" podStartSLOduration=127.694608032 podStartE2EDuration="2m7.694608032s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.691760069 +0000 UTC m=+149.181868465" watchObservedRunningTime="2025-10-09 09:07:05.694608032 +0000 UTC m=+149.184716428" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.702897 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-dgsrr" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.777678 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.777833 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.277811053 +0000 UTC m=+149.767919450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.777966 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.778619 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.278608286 +0000 UTC m=+149.768716682 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.816864 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-vxwmd" podStartSLOduration=127.816849283 podStartE2EDuration="2m7.816849283s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.815701362 +0000 UTC m=+149.305809759" watchObservedRunningTime="2025-10-09 09:07:05.816849283 +0000 UTC m=+149.306957681" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.878836 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.879021 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.378989224 +0000 UTC m=+149.869097620 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.879298 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.879563 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.379553266 +0000 UTC m=+149.869661663 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.949603 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-wng2k" podStartSLOduration=8.949587742 podStartE2EDuration="8.949587742s" podCreationTimestamp="2025-10-09 09:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:05.901926184 +0000 UTC m=+149.392034582" watchObservedRunningTime="2025-10-09 09:07:05.949587742 +0000 UTC m=+149.439696139" Oct 09 09:07:05 crc kubenswrapper[4710]: I1009 09:07:05.980270 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:05 crc kubenswrapper[4710]: E1009 09:07:05.980511 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.480498307 +0000 UTC m=+149.970606704 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.020360 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:06 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:06 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:06 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.020413 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.081128 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.081509 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.581493572 +0000 UTC m=+150.071601970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.105734 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.182340 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.182528 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.6825005 +0000 UTC m=+150.172608896 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.182642 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.182934 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.68292536 +0000 UTC m=+150.173033757 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.218864 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-tvt5b" podStartSLOduration=128.218843014 podStartE2EDuration="2m8.218843014s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:06.037651687 +0000 UTC m=+149.527760084" watchObservedRunningTime="2025-10-09 09:07:06.218843014 +0000 UTC m=+149.708951411" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.283454 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.283617 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.783592899 +0000 UTC m=+150.273701295 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.283815 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.284124 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.784112447 +0000 UTC m=+150.274220844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.336725 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8598s" podStartSLOduration=129.336705961 podStartE2EDuration="2m9.336705961s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:06.22086975 +0000 UTC m=+149.710978147" watchObservedRunningTime="2025-10-09 09:07:06.336705961 +0000 UTC m=+149.826814358" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.358582 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4jb5d" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.385268 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.385388 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.885368402 +0000 UTC m=+150.375476800 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.385458 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.385749 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.885737928 +0000 UTC m=+150.375846335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.486741 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.486889 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.986872567 +0000 UTC m=+150.476980964 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.487030 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.487262 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:06.987252362 +0000 UTC m=+150.477360759 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.588460 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.588922 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.088908412 +0000 UTC m=+150.579016808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.601916 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" event={"ID":"bd8accf5-de9b-4eeb-9a75-1a90317d72ea","Type":"ContainerStarted","Data":"80aa6b739c901c2dca0663adc470a453f9aea5777c48866c066fd1790c598368"} Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.602371 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.603747 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" event={"ID":"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac","Type":"ContainerStarted","Data":"1ab3517915c4b8078a12733f4f714f6a3a1ec7ec166c69827737b603f9295c32"} Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.607473 4710 patch_prober.go:28] interesting pod/downloads-7954f5f757-bmqbd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.607506 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bmqbd" podUID="5edaffb3-e33b-45b9-89fa-39322022da37" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.690170 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.691121 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.1911054 +0000 UTC m=+150.681213797 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.708962 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-fbsvl" podStartSLOduration=128.70894372 podStartE2EDuration="2m8.70894372s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:06.708363868 +0000 UTC m=+150.198472265" watchObservedRunningTime="2025-10-09 09:07:06.70894372 +0000 UTC m=+150.199052116" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.792355 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.792712 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.292695014 +0000 UTC m=+150.782803401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.895896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.896318 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.3962974 +0000 UTC m=+150.886405797 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.952316 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.953262 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:06 crc kubenswrapper[4710]: W1009 09:07:06.975400 4710 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.975675 4710 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.993889 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-l57dg" podStartSLOduration=128.993871042 podStartE2EDuration="2m8.993871042s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:06.992828218 +0000 UTC m=+150.482936616" watchObservedRunningTime="2025-10-09 09:07:06.993871042 +0000 UTC m=+150.483979439" Oct 09 09:07:06 crc kubenswrapper[4710]: I1009 09:07:06.996981 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:06 crc kubenswrapper[4710]: E1009 09:07:06.997334 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.497315878 +0000 UTC m=+150.987424275 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.011006 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.027634 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:07 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:07 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:07 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.027697 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.077870 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" podStartSLOduration=130.077856197 podStartE2EDuration="2m10.077856197s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:07.063283346 +0000 UTC m=+150.553391743" watchObservedRunningTime="2025-10-09 09:07:07.077856197 +0000 UTC m=+150.567964595" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.099072 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.099148 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.099173 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.099236 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rvzv\" (UniqueName: 
\"kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.099619 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.599606643 +0000 UTC m=+151.089715040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.126131 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ckn5c" podStartSLOduration=10.126114508 podStartE2EDuration="10.126114508s" podCreationTimestamp="2025-10-09 09:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:07.116117363 +0000 UTC m=+150.606225761" watchObservedRunningTime="2025-10-09 09:07:07.126114508 +0000 UTC m=+150.616222894" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.126288 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.127651 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.158416 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.199969 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200300 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200408 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.200460 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.700420537 +0000 UTC m=+151.190528934 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200564 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rvzv\" (UniqueName: \"kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200661 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200724 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2z2x\" (UniqueName: \"kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200793 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.200822 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.201163 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.202759 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.243693 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rvzv\" (UniqueName: \"kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv\") pod \"certified-operators-s8wd8\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.291275 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.292064 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.301999 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.302040 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2z2x\" (UniqueName: \"kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.302105 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.302139 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.302405 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.302515 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.802501337 +0000 UTC m=+151.292609735 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.302527 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.334539 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.387010 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2z2x\" (UniqueName: \"kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x\") pod \"community-operators-q9t84\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.403118 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.403399 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.903373861 +0000 UTC m=+151.393482258 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.403561 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt2fd\" (UniqueName: \"kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.403733 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.403849 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.404047 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.404069 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:07.90405251 +0000 UTC m=+151.394160907 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.450723 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.509441 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.510101 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt2fd\" (UniqueName: \"kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.510217 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.510330 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.510776 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.510942 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.010920423 +0000 UTC m=+151.501028821 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.515488 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.516835 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.517149 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.534577 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.548046 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt2fd\" (UniqueName: \"kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd\") pod \"certified-operators-4njdg\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.613208 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66tmt\" (UniqueName: \"kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.613255 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.613285 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.613320 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.613649 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.11363711 +0000 UTC m=+151.603745507 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.632103 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" event={"ID":"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac","Type":"ContainerStarted","Data":"3663912c1a957a57c97e4f9a8eae292b638e6eb8f4c4f55c98a31bf6196a6c66"} Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.714612 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.714947 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.715021 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.715262 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66tmt\" (UniqueName: \"kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.716087 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.216069541 +0000 UTC m=+151.706177939 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.717236 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.718414 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.757315 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66tmt\" (UniqueName: \"kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt\") pod \"community-operators-7w7ww\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.816207 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.816584 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.31657221 +0000 UTC m=+151.806680607 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.840595 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:07 crc kubenswrapper[4710]: I1009 09:07:07.917503 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:07 crc kubenswrapper[4710]: E1009 09:07:07.917746 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.417733948 +0000 UTC m=+151.907842345 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.018562 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.018808 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.518797823 +0000 UTC m=+152.008906211 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.020146 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:08 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:08 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:08 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.020175 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.037298 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.070998 4710 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.121996 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.122096 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.622080005 +0000 UTC m=+152.112188402 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.122373 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.122641 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.622634289 +0000 UTC m=+152.112742686 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.224114 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.224274 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.724245855 +0000 UTC m=+152.214354251 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.224601 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.224994 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.724976861 +0000 UTC m=+152.215085258 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.288810 4710 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openshift-marketplace/certified-operators-s8wd8" secret="" err="failed to sync secret cache: timed out waiting for the condition" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.288886 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.322932 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.327954 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.328202 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.8281774 +0000 UTC m=+152.318285786 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.328240 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.328486 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.828479227 +0000 UTC m=+152.318587624 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: W1009 09:07:08.352696 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb67ac216_99a7_41b5_9f03_65af85f1667e.slice/crio-c04b1a94f3b74da05c4700375fb32833019c082e8ba9fb221d6a7db2f2f67e92 WatchSource:0}: Error finding container c04b1a94f3b74da05c4700375fb32833019c082e8ba9fb221d6a7db2f2f67e92: Status 404 returned error can't find the container with id c04b1a94f3b74da05c4700375fb32833019c082e8ba9fb221d6a7db2f2f67e92 Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.428694 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.428937 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.928913958 +0000 UTC m=+152.419022355 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.429132 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.429453 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:08.929425431 +0000 UTC m=+152.419533828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.507158 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.516565 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.530638 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.530739 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.030717495 +0000 UTC m=+152.520825892 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.530838 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.531171 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.03116091 +0000 UTC m=+152.521269307 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.632169 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.632312 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.132287653 +0000 UTC m=+152.622396051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.632381 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.632633 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.132622393 +0000 UTC m=+152.622730790 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.637573 4710 generic.go:334] "Generic (PLEG): container finished" podID="db9cdb3d-9888-46e4-a5db-710577557a80" containerID="b0380aa741e3056effb79810f424aabcb02b5baec0e213c44a7845d2a145ee3b" exitCode=0 Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.637638 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerDied","Data":"b0380aa741e3056effb79810f424aabcb02b5baec0e213c44a7845d2a145ee3b"} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.637663 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerStarted","Data":"581a7b5b670186944e56e7eb5293981791f391b020b2e0ce708da812d04f4993"} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.642674 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.646672 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" event={"ID":"c7e33a03-fbfd-4c8e-a0c4-f2efcd3aedac","Type":"ContainerStarted","Data":"0c64a55159bdd3082e468d619ad2e0bd75f028ba5657652feac30f78d7db9d6d"} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.651723 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerStarted","Data":"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6"} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.651905 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerStarted","Data":"c04b1a94f3b74da05c4700375fb32833019c082e8ba9fb221d6a7db2f2f67e92"} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.671906 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-5bjcn" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.716999 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-nnk8n" podStartSLOduration=11.716980641 podStartE2EDuration="11.716980641s" podCreationTimestamp="2025-10-09 09:06:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:08.680297516 +0000 UTC m=+152.170405913" watchObservedRunningTime="2025-10-09 09:07:08.716980641 +0000 UTC m=+152.207089038" Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.733563 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.734780 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.234751023 +0000 UTC m=+152.724859420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.835922 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.836614 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.336593013 +0000 UTC m=+152.826701410 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l9bsw" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.846408 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.884446 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.936745 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:08 crc kubenswrapper[4710]: E1009 09:07:08.937031 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-09 09:07:09.437014548 +0000 UTC m=+152.927122945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.951619 4710 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-09T09:07:08.071016743Z","Handler":null,"Name":""} Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.955384 4710 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 09 09:07:08 crc kubenswrapper[4710]: I1009 09:07:08.955412 4710 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.022701 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:09 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:09 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:09 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.024060 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" 
podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.039093 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.050257 4710 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.050309 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.076343 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l9bsw\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.140850 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.162468 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.265351 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.266344 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.270715 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.318214 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.333134 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.344222 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.344346 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.344529 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25tpl\" (UniqueName: \"kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.445869 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.446205 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.446246 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25tpl\" (UniqueName: \"kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.447280 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.447584 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.464231 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-25tpl\" (UniqueName: \"kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl\") pod \"redhat-marketplace-bjx77\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.481489 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:07:09 crc kubenswrapper[4710]: W1009 09:07:09.493301 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51644181_d79b_4704_873f_d3c13740f656.slice/crio-14decf36ae39e3fdc83a2d0ce358a1e15e5b6a70afc7f28c714e761080d8883f WatchSource:0}: Error finding container 14decf36ae39e3fdc83a2d0ce358a1e15e5b6a70afc7f28c714e761080d8883f: Status 404 returned error can't find the container with id 14decf36ae39e3fdc83a2d0ce358a1e15e5b6a70afc7f28c714e761080d8883f Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.577742 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.651799 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.651840 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.653860 4710 patch_prober.go:28] interesting pod/console-f9d7485db-5q44l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.653887 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-5q44l" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.665460 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.666976 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.675657 4710 generic.go:334] "Generic (PLEG): container finished" podID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerID="220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6" exitCode=0 Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.675717 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerDied","Data":"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.682101 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" event={"ID":"51644181-d79b-4704-873f-d3c13740f656","Type":"ContainerStarted","Data":"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.683789 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.683807 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" event={"ID":"51644181-d79b-4704-873f-d3c13740f656","Type":"ContainerStarted","Data":"14decf36ae39e3fdc83a2d0ce358a1e15e5b6a70afc7f28c714e761080d8883f"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.692273 4710 generic.go:334] "Generic (PLEG): container finished" podID="e73b2237-e967-49cc-9368-c670f2749a60" containerID="90150de71e4bf58f083ada524a239869821b08f7286d7cc16c88f213363cc518" exitCode=0 Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.692316 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerDied","Data":"90150de71e4bf58f083ada524a239869821b08f7286d7cc16c88f213363cc518"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.692331 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerStarted","Data":"aaa5379b750591c35e408cfaa0e982f01241ab634298b7b3548d58eabfd406de"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.693889 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.698698 4710 generic.go:334] "Generic (PLEG): container finished" podID="2f367bf0-fb0c-4884-b874-d4a426a8ff5a" containerID="57693cd1cf50852dac8ec9a559e30eb932fbf87ee2f6b483640c94699816c4b5" exitCode=0 Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.698736 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" event={"ID":"2f367bf0-fb0c-4884-b874-d4a426a8ff5a","Type":"ContainerDied","Data":"57693cd1cf50852dac8ec9a559e30eb932fbf87ee2f6b483640c94699816c4b5"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.701051 4710 generic.go:334] "Generic (PLEG): container finished" podID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerID="46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a" exitCode=0 Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.701152 4710 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerDied","Data":"46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.701267 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerStarted","Data":"3544276a8acc3d6f5f6c7c65fc1947c7bea86aefb12959f0927606b45d01a255"} Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.726681 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" podStartSLOduration=131.726669479 podStartE2EDuration="2m11.726669479s" podCreationTimestamp="2025-10-09 09:04:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:09.72306904 +0000 UTC m=+153.213177436" watchObservedRunningTime="2025-10-09 09:07:09.726669479 +0000 UTC m=+153.216777877" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.755072 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.755251 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.755283 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc47d\" (UniqueName: \"kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.784357 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.784419 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.802291 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.845909 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.869157 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.869212 
4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc47d\" (UniqueName: \"kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.869288 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.870495 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.873266 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.896281 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc47d\" (UniqueName: \"kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d\") pod \"redhat-marketplace-sjp9w\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.915853 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.915891 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.934797 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.970586 4710 patch_prober.go:28] interesting pod/downloads-7954f5f757-bmqbd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.970641 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bmqbd" podUID="5edaffb3-e33b-45b9-89fa-39322022da37" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.975272 4710 patch_prober.go:28] interesting pod/downloads-7954f5f757-bmqbd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 09 09:07:09 crc kubenswrapper[4710]: 
I1009 09:07:09.975374 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bmqbd" podUID="5edaffb3-e33b-45b9-89fa-39322022da37" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.976547 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.977286 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.979119 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.979129 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.988816 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 09:07:09 crc kubenswrapper[4710]: I1009 09:07:09.993839 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.015474 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.018237 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:10 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:10 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:10 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.018315 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.060278 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.061420 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.063279 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.070651 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.071985 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.072136 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175223 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175526 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175648 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175669 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175719 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.175758 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fccbb\" (UniqueName: \"kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb\") pod 
\"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.196902 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.201239 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: W1009 09:07:10.201982 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod01eaf904_9e41_4374_8fe8_ed5f5a4053ac.slice/crio-5e0c26fc79c980d3f37be0c39f68a9ae757f80f437bd66c0b5e057dfb7b9a030 WatchSource:0}: Error finding container 5e0c26fc79c980d3f37be0c39f68a9ae757f80f437bd66c0b5e057dfb7b9a030: Status 404 returned error can't find the container with id 5e0c26fc79c980d3f37be0c39f68a9ae757f80f437bd66c0b5e057dfb7b9a030 Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.269023 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.271153 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.275588 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.277519 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.277561 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.277602 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fccbb\" (UniqueName: \"kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.278547 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.278663 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content\") 
pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.293059 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fccbb\" (UniqueName: \"kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb\") pod \"redhat-operators-l7l4g\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.325202 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.378744 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ph8h\" (UniqueName: \"kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.379062 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.379244 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.389550 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.481252 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.481290 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ph8h\" (UniqueName: \"kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.481314 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.482079 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.482308 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.504581 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ph8h\" (UniqueName: \"kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h\") pod \"redhat-operators-tqk6t\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.589743 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.590640 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.736489 4710 generic.go:334] "Generic (PLEG): container finished" podID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerID="229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c" exitCode=0 Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.736559 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerDied","Data":"229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c"} Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.736584 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerStarted","Data":"5e0c26fc79c980d3f37be0c39f68a9ae757f80f437bd66c0b5e057dfb7b9a030"} Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.769705 4710 generic.go:334] "Generic (PLEG): container finished" podID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerID="8ab9a6c2cf5e7823006d380514d28b03659003cd3d1823aa387545547c5806b8" exitCode=0 Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.769805 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerDied","Data":"8ab9a6c2cf5e7823006d380514d28b03659003cd3d1823aa387545547c5806b8"} Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.769840 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerStarted","Data":"37619aa2f3634d611dad8aebeef7327e23485525996c5439ec7b9d51eb0db4c0"} Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.785302 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e172103-ff1a-4a40-984a-bc962737add5","Type":"ContainerStarted","Data":"64ca1dbf57508dec21b2d1b07c527bb3bc7c6a64c9c5fc9dfd3455788f3b6447"} Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.792255 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8598s" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.810123 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v8np5" Oct 09 09:07:10 crc kubenswrapper[4710]: I1009 09:07:10.872793 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.002049 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.027584 4710 patch_prober.go:28] interesting pod/router-default-5444994796-qp498 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 09 09:07:11 crc kubenswrapper[4710]: [-]has-synced failed: reason withheld Oct 09 09:07:11 crc kubenswrapper[4710]: [+]process-running ok Oct 09 09:07:11 crc kubenswrapper[4710]: healthz check failed Oct 09 09:07:11 crc 
kubenswrapper[4710]: I1009 09:07:11.027906 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-qp498" podUID="6e022686-4480-4610-9760-e1487bb99265" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.066222 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.239866 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.309070 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6n4r\" (UniqueName: \"kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r\") pod \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.309127 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume\") pod \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.309171 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume\") pod \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\" (UID: \"2f367bf0-fb0c-4884-b874-d4a426a8ff5a\") " Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.311281 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume" (OuterVolumeSpecName: "config-volume") pod "2f367bf0-fb0c-4884-b874-d4a426a8ff5a" (UID: "2f367bf0-fb0c-4884-b874-d4a426a8ff5a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.319952 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2f367bf0-fb0c-4884-b874-d4a426a8ff5a" (UID: "2f367bf0-fb0c-4884-b874-d4a426a8ff5a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.322335 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r" (OuterVolumeSpecName: "kube-api-access-p6n4r") pod "2f367bf0-fb0c-4884-b874-d4a426a8ff5a" (UID: "2f367bf0-fb0c-4884-b874-d4a426a8ff5a"). InnerVolumeSpecName "kube-api-access-p6n4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.418898 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.418927 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.418937 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6n4r\" (UniqueName: \"kubernetes.io/projected/2f367bf0-fb0c-4884-b874-d4a426a8ff5a-kube-api-access-p6n4r\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.850127 4710 generic.go:334] "Generic (PLEG): container finished" podID="9e172103-ff1a-4a40-984a-bc962737add5" containerID="2288428283cfb713cc465dd52c69c94ac72fe8bf5a319240e06bef38fade79a1" exitCode=0 Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.850311 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e172103-ff1a-4a40-984a-bc962737add5","Type":"ContainerDied","Data":"2288428283cfb713cc465dd52c69c94ac72fe8bf5a319240e06bef38fade79a1"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.882202 4710 generic.go:334] "Generic (PLEG): container finished" podID="542f16e8-eda7-4525-ba86-cc890a369f30" containerID="4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e" exitCode=0 Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.882300 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerDied","Data":"4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.882346 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerStarted","Data":"9a80fc6af46e6effa927c2acd6fb236094e8486370071fb7a2403171f5737e31"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.897235 4710 generic.go:334] "Generic (PLEG): container finished" podID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerID="b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5" exitCode=0 Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.897403 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerDied","Data":"b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.897496 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerStarted","Data":"23c4740dcbca2fbdfa699968458d28ce944df17887322deaa1e8d59420eba6bd"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.929261 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.929378 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx" event={"ID":"2f367bf0-fb0c-4884-b874-d4a426a8ff5a","Type":"ContainerDied","Data":"3fc19909af0c1beab96444d575b964e8bba7b415e3a00a60b7e48dc440eb16e0"} Oct 09 09:07:11 crc kubenswrapper[4710]: I1009 09:07:11.929414 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fc19909af0c1beab96444d575b964e8bba7b415e3a00a60b7e48dc440eb16e0" Oct 09 09:07:12 crc kubenswrapper[4710]: I1009 09:07:12.021098 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:07:12 crc kubenswrapper[4710]: I1009 09:07:12.027656 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-qp498" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.329803 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.368061 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir\") pod \"9e172103-ff1a-4a40-984a-bc962737add5\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.368135 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access\") pod \"9e172103-ff1a-4a40-984a-bc962737add5\" (UID: \"9e172103-ff1a-4a40-984a-bc962737add5\") " Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.369503 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9e172103-ff1a-4a40-984a-bc962737add5" (UID: "9e172103-ff1a-4a40-984a-bc962737add5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.388193 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9e172103-ff1a-4a40-984a-bc962737add5" (UID: "9e172103-ff1a-4a40-984a-bc962737add5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.471709 4710 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e172103-ff1a-4a40-984a-bc962737add5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.471749 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e172103-ff1a-4a40-984a-bc962737add5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.990824 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e172103-ff1a-4a40-984a-bc962737add5","Type":"ContainerDied","Data":"64ca1dbf57508dec21b2d1b07c527bb3bc7c6a64c9c5fc9dfd3455788f3b6447"} Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.990865 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64ca1dbf57508dec21b2d1b07c527bb3bc7c6a64c9c5fc9dfd3455788f3b6447" Oct 09 09:07:13 crc kubenswrapper[4710]: I1009 09:07:13.990884 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.022330 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 09:07:14 crc kubenswrapper[4710]: E1009 09:07:14.022553 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f367bf0-fb0c-4884-b874-d4a426a8ff5a" containerName="collect-profiles" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.022565 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f367bf0-fb0c-4884-b874-d4a426a8ff5a" containerName="collect-profiles" Oct 09 09:07:14 crc kubenswrapper[4710]: E1009 09:07:14.022575 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e172103-ff1a-4a40-984a-bc962737add5" containerName="pruner" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.022581 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e172103-ff1a-4a40-984a-bc962737add5" containerName="pruner" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.022669 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f367bf0-fb0c-4884-b874-d4a426a8ff5a" containerName="collect-profiles" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.022681 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e172103-ff1a-4a40-984a-bc962737add5" containerName="pruner" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.023021 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.026080 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.029982 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.031497 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.078891 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.078976 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.195711 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.195965 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.196114 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.221022 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.348877 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:14 crc kubenswrapper[4710]: I1009 09:07:14.700712 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 09 09:07:14 crc kubenswrapper[4710]: W1009 09:07:14.736658 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod95960189_5bcf_4fcf_bd91_99ac89579dbe.slice/crio-33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30 WatchSource:0}: Error finding container 33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30: Status 404 returned error can't find the container with id 33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30 Oct 09 09:07:15 crc kubenswrapper[4710]: I1009 09:07:15.008485 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"95960189-5bcf-4fcf-bd91-99ac89579dbe","Type":"ContainerStarted","Data":"33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30"} Oct 09 09:07:15 crc kubenswrapper[4710]: I1009 09:07:15.450959 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ckn5c" Oct 09 09:07:16 crc kubenswrapper[4710]: I1009 09:07:16.034971 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"95960189-5bcf-4fcf-bd91-99ac89579dbe","Type":"ContainerStarted","Data":"69eeaf39860f008e8a7bfe1b4106797e5fb839de7d380c774b5a1c9504fc94cf"} Oct 09 09:07:16 crc kubenswrapper[4710]: I1009 09:07:16.053444 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.053418596 podStartE2EDuration="2.053418596s" podCreationTimestamp="2025-10-09 09:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:16.051579924 +0000 UTC m=+159.541688321" watchObservedRunningTime="2025-10-09 09:07:16.053418596 +0000 UTC m=+159.543526993" Oct 09 09:07:17 crc kubenswrapper[4710]: I1009 09:07:17.098508 4710 generic.go:334] "Generic (PLEG): container finished" podID="95960189-5bcf-4fcf-bd91-99ac89579dbe" containerID="69eeaf39860f008e8a7bfe1b4106797e5fb839de7d380c774b5a1c9504fc94cf" exitCode=0 Oct 09 09:07:17 crc kubenswrapper[4710]: I1009 09:07:17.098573 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"95960189-5bcf-4fcf-bd91-99ac89579dbe","Type":"ContainerDied","Data":"69eeaf39860f008e8a7bfe1b4106797e5fb839de7d380c774b5a1c9504fc94cf"} Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 09:07:19.595595 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 09:07:19.606246 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b237d61d-3d37-4b76-afa3-d5fe7119b0b6-metrics-certs\") pod \"network-metrics-daemon-p9sh6\" (UID: \"b237d61d-3d37-4b76-afa3-d5fe7119b0b6\") " pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 
09:07:19.655307 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 09:07:19.659759 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 09:07:19.840022 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-p9sh6" Oct 09 09:07:19 crc kubenswrapper[4710]: I1009 09:07:19.977674 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-bmqbd" Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.480017 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.633173 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir\") pod \"95960189-5bcf-4fcf-bd91-99ac89579dbe\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.633249 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access\") pod \"95960189-5bcf-4fcf-bd91-99ac89579dbe\" (UID: \"95960189-5bcf-4fcf-bd91-99ac89579dbe\") " Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.633295 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "95960189-5bcf-4fcf-bd91-99ac89579dbe" (UID: "95960189-5bcf-4fcf-bd91-99ac89579dbe"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.633530 4710 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/95960189-5bcf-4fcf-bd91-99ac89579dbe-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.640256 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "95960189-5bcf-4fcf-bd91-99ac89579dbe" (UID: "95960189-5bcf-4fcf-bd91-99ac89579dbe"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:21 crc kubenswrapper[4710]: I1009 09:07:21.735718 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95960189-5bcf-4fcf-bd91-99ac89579dbe-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:22 crc kubenswrapper[4710]: I1009 09:07:22.147100 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"95960189-5bcf-4fcf-bd91-99ac89579dbe","Type":"ContainerDied","Data":"33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30"} Oct 09 09:07:22 crc kubenswrapper[4710]: I1009 09:07:22.147171 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33f7e3180c78b6854c3b37176fe3368a0561f7b25620c0ccf0f7fa87ea04be30" Oct 09 09:07:22 crc kubenswrapper[4710]: I1009 09:07:22.147243 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 09 09:07:22 crc kubenswrapper[4710]: I1009 09:07:22.284411 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-p9sh6"] Oct 09 09:07:22 crc kubenswrapper[4710]: W1009 09:07:22.297211 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb237d61d_3d37_4b76_afa3_d5fe7119b0b6.slice/crio-5acf569e262958a4e43394d2a7c40d6453c2be8246636f2c340642959b35aef3 WatchSource:0}: Error finding container 5acf569e262958a4e43394d2a7c40d6453c2be8246636f2c340642959b35aef3: Status 404 returned error can't find the container with id 5acf569e262958a4e43394d2a7c40d6453c2be8246636f2c340642959b35aef3 Oct 09 09:07:23 crc kubenswrapper[4710]: I1009 09:07:23.157570 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" event={"ID":"b237d61d-3d37-4b76-afa3-d5fe7119b0b6","Type":"ContainerStarted","Data":"7cf5ca3d971858d41da3ee5b402ae29fac2742b29ead44aadb84ba3a7b743ef5"} Oct 09 09:07:23 crc kubenswrapper[4710]: I1009 09:07:23.157889 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" event={"ID":"b237d61d-3d37-4b76-afa3-d5fe7119b0b6","Type":"ContainerStarted","Data":"16b40fb26756203dab7ed7b16f4ff6bf597fef86a85fd87115e901f20c965d69"} Oct 09 09:07:23 crc kubenswrapper[4710]: I1009 09:07:23.157903 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-p9sh6" event={"ID":"b237d61d-3d37-4b76-afa3-d5fe7119b0b6","Type":"ContainerStarted","Data":"5acf569e262958a4e43394d2a7c40d6453c2be8246636f2c340642959b35aef3"} Oct 09 09:07:23 crc kubenswrapper[4710]: I1009 09:07:23.172643 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-p9sh6" podStartSLOduration=146.17262931 podStartE2EDuration="2m26.17262931s" podCreationTimestamp="2025-10-09 09:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:07:23.169955944 +0000 UTC m=+166.660064342" watchObservedRunningTime="2025-10-09 09:07:23.17262931 +0000 UTC m=+166.662737706" Oct 09 09:07:29 crc kubenswrapper[4710]: I1009 09:07:29.324456 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:07:34 crc 
kubenswrapper[4710]: I1009 09:07:34.225169 4710 generic.go:334] "Generic (PLEG): container finished" podID="db9cdb3d-9888-46e4-a5db-710577557a80" containerID="7654e87089f03dbe68f4c532bd124315ae56dc93a9d41bd2b5c2ff1ca49bb5c1" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.225302 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerDied","Data":"7654e87089f03dbe68f4c532bd124315ae56dc93a9d41bd2b5c2ff1ca49bb5c1"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.228321 4710 generic.go:334] "Generic (PLEG): container finished" podID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerID="da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.228381 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerDied","Data":"da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.231340 4710 generic.go:334] "Generic (PLEG): container finished" podID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerID="9cd990960171b195bf93481fa704d1dc5223ca99aef3ce3817a367e12e5982e4" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.231542 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerDied","Data":"9cd990960171b195bf93481fa704d1dc5223ca99aef3ce3817a367e12e5982e4"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.234310 4710 generic.go:334] "Generic (PLEG): container finished" podID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerID="bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.234360 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerDied","Data":"bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.237741 4710 generic.go:334] "Generic (PLEG): container finished" podID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerID="9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.237759 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerDied","Data":"9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.239963 4710 generic.go:334] "Generic (PLEG): container finished" podID="542f16e8-eda7-4525-ba86-cc890a369f30" containerID="2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.240023 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerDied","Data":"2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.243613 4710 generic.go:334] "Generic (PLEG): container 
finished" podID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerID="87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.243667 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerDied","Data":"87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37"} Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.247671 4710 generic.go:334] "Generic (PLEG): container finished" podID="e73b2237-e967-49cc-9368-c670f2749a60" containerID="008b594ec08a51f16fb912c232c011d45627cd6425aa9efa0b51de93ed20d73f" exitCode=0 Oct 09 09:07:34 crc kubenswrapper[4710]: I1009 09:07:34.247728 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerDied","Data":"008b594ec08a51f16fb912c232c011d45627cd6425aa9efa0b51de93ed20d73f"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.257955 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerStarted","Data":"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.260832 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerStarted","Data":"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.263632 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerStarted","Data":"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.266321 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerStarted","Data":"76f9289ae01963cc780168d410960301e260f0d8780236909f3a7547dcaba49e"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.268759 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerStarted","Data":"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.270544 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerStarted","Data":"fbc679583112279e26b84efae67a345de0f3a026947200dc0c6ca908ff49af26"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.273543 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerStarted","Data":"8ebb4f4d6004dddb43f114514cf441c6a88e67a4914edda0843ef27531bf2455"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.275700 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" 
event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerStarted","Data":"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281"} Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.290291 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4njdg" podStartSLOduration=3.00124567 podStartE2EDuration="28.29027213s" podCreationTimestamp="2025-10-09 09:07:07 +0000 UTC" firstStartedPulling="2025-10-09 09:07:09.714650227 +0000 UTC m=+153.204758623" lastFinishedPulling="2025-10-09 09:07:35.003676686 +0000 UTC m=+178.493785083" observedRunningTime="2025-10-09 09:07:35.288941243 +0000 UTC m=+178.779049640" watchObservedRunningTime="2025-10-09 09:07:35.29027213 +0000 UTC m=+178.780380527" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.313823 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sjp9w" podStartSLOduration=2.154841167 podStartE2EDuration="26.313805182s" podCreationTimestamp="2025-10-09 09:07:09 +0000 UTC" firstStartedPulling="2025-10-09 09:07:10.752138813 +0000 UTC m=+154.242247210" lastFinishedPulling="2025-10-09 09:07:34.911102828 +0000 UTC m=+178.401211225" observedRunningTime="2025-10-09 09:07:35.310273462 +0000 UTC m=+178.800381859" watchObservedRunningTime="2025-10-09 09:07:35.313805182 +0000 UTC m=+178.803913579" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.344155 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tqk6t" podStartSLOduration=2.4609747840000002 podStartE2EDuration="25.34412747s" podCreationTimestamp="2025-10-09 09:07:10 +0000 UTC" firstStartedPulling="2025-10-09 09:07:11.886082995 +0000 UTC m=+155.376191392" lastFinishedPulling="2025-10-09 09:07:34.769235681 +0000 UTC m=+178.259344078" observedRunningTime="2025-10-09 09:07:35.341104067 +0000 UTC m=+178.831212464" watchObservedRunningTime="2025-10-09 09:07:35.34412747 +0000 UTC m=+178.834235867" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.364337 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s8wd8" podStartSLOduration=4.110946699 podStartE2EDuration="29.364309784s" podCreationTimestamp="2025-10-09 09:07:06 +0000 UTC" firstStartedPulling="2025-10-09 09:07:09.694720819 +0000 UTC m=+153.184829216" lastFinishedPulling="2025-10-09 09:07:34.948083904 +0000 UTC m=+178.438192301" observedRunningTime="2025-10-09 09:07:35.359932671 +0000 UTC m=+178.850041068" watchObservedRunningTime="2025-10-09 09:07:35.364309784 +0000 UTC m=+178.854418181" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.381159 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q9t84" podStartSLOduration=2.213431425 podStartE2EDuration="28.381112383s" podCreationTimestamp="2025-10-09 09:07:07 +0000 UTC" firstStartedPulling="2025-10-09 09:07:08.642256335 +0000 UTC m=+152.132364731" lastFinishedPulling="2025-10-09 09:07:34.809937292 +0000 UTC m=+178.300045689" observedRunningTime="2025-10-09 09:07:35.378794477 +0000 UTC m=+178.868902875" watchObservedRunningTime="2025-10-09 09:07:35.381112383 +0000 UTC m=+178.871220780" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.401777 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l7l4g" podStartSLOduration=2.453285163 
podStartE2EDuration="25.401755293s" podCreationTimestamp="2025-10-09 09:07:10 +0000 UTC" firstStartedPulling="2025-10-09 09:07:11.899995904 +0000 UTC m=+155.390104290" lastFinishedPulling="2025-10-09 09:07:34.848466022 +0000 UTC m=+178.338574420" observedRunningTime="2025-10-09 09:07:35.395308965 +0000 UTC m=+178.885417362" watchObservedRunningTime="2025-10-09 09:07:35.401755293 +0000 UTC m=+178.891863691" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.444701 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bjx77" podStartSLOduration=2.473702255 podStartE2EDuration="26.444685141s" podCreationTimestamp="2025-10-09 09:07:09 +0000 UTC" firstStartedPulling="2025-10-09 09:07:10.778698274 +0000 UTC m=+154.268806671" lastFinishedPulling="2025-10-09 09:07:34.749681159 +0000 UTC m=+178.239789557" observedRunningTime="2025-10-09 09:07:35.443533233 +0000 UTC m=+178.933641629" watchObservedRunningTime="2025-10-09 09:07:35.444685141 +0000 UTC m=+178.934793539" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.459522 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7w7ww" podStartSLOduration=3.42209232 podStartE2EDuration="28.459498925s" podCreationTimestamp="2025-10-09 09:07:07 +0000 UTC" firstStartedPulling="2025-10-09 09:07:09.684859059 +0000 UTC m=+153.174967457" lastFinishedPulling="2025-10-09 09:07:34.722265665 +0000 UTC m=+178.212374062" observedRunningTime="2025-10-09 09:07:35.458296942 +0000 UTC m=+178.948405340" watchObservedRunningTime="2025-10-09 09:07:35.459498925 +0000 UTC m=+178.949607313" Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.545974 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:07:35 crc kubenswrapper[4710]: I1009 09:07:35.546042 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.451788 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.452553 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.517675 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.841278 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.841336 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:37 crc kubenswrapper[4710]: I1009 09:07:37.868244 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.289385 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.289494 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.323842 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.517470 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.518243 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:38 crc kubenswrapper[4710]: I1009 09:07:38.549953 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.578190 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.578528 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.608849 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.808465 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-5j8t6" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.993949 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:39 crc kubenswrapper[4710]: I1009 09:07:39.994279 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.024112 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.335485 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.337072 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.338215 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.340905 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.390768 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 
09:07:40.390794 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.419826 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.591556 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.591615 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:40 crc kubenswrapper[4710]: I1009 09:07:40.619917 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:41 crc kubenswrapper[4710]: I1009 09:07:41.338478 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:07:41 crc kubenswrapper[4710]: I1009 09:07:41.338531 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:41 crc kubenswrapper[4710]: I1009 09:07:41.865083 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.313082 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4njdg" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="registry-server" containerID="cri-o://8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8" gracePeriod=2 Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.604343 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.733197 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.746616 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities\") pod \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.746768 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt2fd\" (UniqueName: \"kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd\") pod \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.747397 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities" (OuterVolumeSpecName: "utilities") pod "faced6b4-369b-4866-8fe7-e9c80f9fc52b" (UID: "faced6b4-369b-4866-8fe7-e9c80f9fc52b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.748624 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content\") pod \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\" (UID: \"faced6b4-369b-4866-8fe7-e9c80f9fc52b\") " Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.749829 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.764353 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd" (OuterVolumeSpecName: "kube-api-access-mt2fd") pod "faced6b4-369b-4866-8fe7-e9c80f9fc52b" (UID: "faced6b4-369b-4866-8fe7-e9c80f9fc52b"). InnerVolumeSpecName "kube-api-access-mt2fd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.791293 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "faced6b4-369b-4866-8fe7-e9c80f9fc52b" (UID: "faced6b4-369b-4866-8fe7-e9c80f9fc52b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.850981 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt2fd\" (UniqueName: \"kubernetes.io/projected/faced6b4-369b-4866-8fe7-e9c80f9fc52b-kube-api-access-mt2fd\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:42 crc kubenswrapper[4710]: I1009 09:07:42.851005 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/faced6b4-369b-4866-8fe7-e9c80f9fc52b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.319593 4710 generic.go:334] "Generic (PLEG): container finished" podID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerID="8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8" exitCode=0 Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.319675 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4njdg" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.319678 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerDied","Data":"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8"} Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.319997 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4njdg" event={"ID":"faced6b4-369b-4866-8fe7-e9c80f9fc52b","Type":"ContainerDied","Data":"3544276a8acc3d6f5f6c7c65fc1947c7bea86aefb12959f0927606b45d01a255"} Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.320016 4710 scope.go:117] "RemoveContainer" containerID="8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.339033 4710 scope.go:117] "RemoveContainer" containerID="bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.339178 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.341803 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4njdg"] Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.358256 4710 scope.go:117] "RemoveContainer" containerID="46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.369680 4710 scope.go:117] "RemoveContainer" containerID="8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8" Oct 09 09:07:43 crc kubenswrapper[4710]: E1009 09:07:43.370012 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8\": container with ID starting with 8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8 not found: ID does not exist" containerID="8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.370065 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8"} err="failed to get container status \"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8\": rpc error: code = NotFound desc = could not find container \"8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8\": container with ID starting with 8af88e1d8086184eb82c24ebd2e0aab1d5ccba8a27644ba263bcf10bd89653a8 not found: ID does not exist" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.370127 4710 scope.go:117] "RemoveContainer" containerID="bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f" Oct 09 09:07:43 crc kubenswrapper[4710]: E1009 09:07:43.370420 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f\": container with ID starting with bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f not found: ID does not exist" containerID="bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.370749 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f"} err="failed to get container status \"bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f\": rpc error: code = NotFound desc = could not find container \"bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f\": container with ID starting with bd4e44c90bfc11ddf275799c5749a9f943bd2b44e39f3fd5de0ff563d3e2f04f not found: ID does not exist" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.370776 4710 scope.go:117] "RemoveContainer" containerID="46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a" Oct 09 09:07:43 crc kubenswrapper[4710]: E1009 09:07:43.371181 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a\": container with ID starting with 46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a not found: ID does not exist" containerID="46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a" Oct 09 09:07:43 crc kubenswrapper[4710]: I1009 09:07:43.371211 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a"} err="failed to get container status \"46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a\": rpc error: code = NotFound desc = could not find container \"46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a\": container with ID starting with 46feb71983da243b898534aa07af96afeca695bc1e783c61e44a8ca2e5efda6a not found: ID does not exist" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.264750 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.264949 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sjp9w" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="registry-server" containerID="cri-o://bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281" gracePeriod=2 Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.535380 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.672601 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content\") pod \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.672673 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities\") pod \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.672744 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc47d\" (UniqueName: \"kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d\") pod \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\" (UID: \"01eaf904-9e41-4374-8fe8-ed5f5a4053ac\") " Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.673523 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities" (OuterVolumeSpecName: "utilities") pod "01eaf904-9e41-4374-8fe8-ed5f5a4053ac" (UID: "01eaf904-9e41-4374-8fe8-ed5f5a4053ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.673773 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.677944 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d" (OuterVolumeSpecName: "kube-api-access-tc47d") pod "01eaf904-9e41-4374-8fe8-ed5f5a4053ac" (UID: "01eaf904-9e41-4374-8fe8-ed5f5a4053ac"). InnerVolumeSpecName "kube-api-access-tc47d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.683980 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01eaf904-9e41-4374-8fe8-ed5f5a4053ac" (UID: "01eaf904-9e41-4374-8fe8-ed5f5a4053ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.774888 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.775025 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc47d\" (UniqueName: \"kubernetes.io/projected/01eaf904-9e41-4374-8fe8-ed5f5a4053ac-kube-api-access-tc47d\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:44 crc kubenswrapper[4710]: I1009 09:07:44.820218 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" path="/var/lib/kubelet/pods/faced6b4-369b-4866-8fe7-e9c80f9fc52b/volumes" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.265705 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.266257 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tqk6t" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="registry-server" containerID="cri-o://6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe" gracePeriod=2 Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.331482 4710 generic.go:334] "Generic (PLEG): container finished" podID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerID="bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281" exitCode=0 Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.331525 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerDied","Data":"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281"} Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.331556 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sjp9w" event={"ID":"01eaf904-9e41-4374-8fe8-ed5f5a4053ac","Type":"ContainerDied","Data":"5e0c26fc79c980d3f37be0c39f68a9ae757f80f437bd66c0b5e057dfb7b9a030"} Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.331572 4710 scope.go:117] "RemoveContainer" containerID="bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.331702 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sjp9w" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.355408 4710 scope.go:117] "RemoveContainer" containerID="da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.360827 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.372127 4710 scope.go:117] "RemoveContainer" containerID="229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.372880 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sjp9w"] Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.403364 4710 scope.go:117] "RemoveContainer" containerID="bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281" Oct 09 09:07:45 crc kubenswrapper[4710]: E1009 09:07:45.403748 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281\": container with ID starting with bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281 not found: ID does not exist" containerID="bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.403786 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281"} err="failed to get container status \"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281\": rpc error: code = NotFound desc = could not find container \"bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281\": container with ID starting with bd58837a4b3e765e6e4872ce7dcb5690f344eb436aa08284fee8c05e31d84281 not found: ID does not exist" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.403813 4710 scope.go:117] "RemoveContainer" containerID="da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4" Oct 09 09:07:45 crc kubenswrapper[4710]: E1009 09:07:45.404188 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4\": container with ID starting with da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4 not found: ID does not exist" containerID="da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.404224 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4"} err="failed to get container status \"da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4\": rpc error: code = NotFound desc = could not find container \"da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4\": container with ID starting with da2129a2d6f753d2d9507e3ad5226923598903e017af8ac2436b37bbe58814f4 not found: ID does not exist" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.404252 4710 scope.go:117] "RemoveContainer" containerID="229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c" Oct 09 09:07:45 crc kubenswrapper[4710]: E1009 09:07:45.404541 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c\": container with ID starting with 229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c not found: ID does not exist" containerID="229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.404577 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c"} err="failed to get container status \"229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c\": rpc error: code = NotFound desc = could not find container \"229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c\": container with ID starting with 229a10720cd460085c427398fcf2891ce83a80342a723a0b223736756fcb882c not found: ID does not exist" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.539328 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.686842 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content\") pod \"542f16e8-eda7-4525-ba86-cc890a369f30\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.686916 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities\") pod \"542f16e8-eda7-4525-ba86-cc890a369f30\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.686988 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ph8h\" (UniqueName: \"kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h\") pod \"542f16e8-eda7-4525-ba86-cc890a369f30\" (UID: \"542f16e8-eda7-4525-ba86-cc890a369f30\") " Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.687754 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities" (OuterVolumeSpecName: "utilities") pod "542f16e8-eda7-4525-ba86-cc890a369f30" (UID: "542f16e8-eda7-4525-ba86-cc890a369f30"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.691307 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h" (OuterVolumeSpecName: "kube-api-access-2ph8h") pod "542f16e8-eda7-4525-ba86-cc890a369f30" (UID: "542f16e8-eda7-4525-ba86-cc890a369f30"). InnerVolumeSpecName "kube-api-access-2ph8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.747957 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "542f16e8-eda7-4525-ba86-cc890a369f30" (UID: "542f16e8-eda7-4525-ba86-cc890a369f30"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.788149 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ph8h\" (UniqueName: \"kubernetes.io/projected/542f16e8-eda7-4525-ba86-cc890a369f30-kube-api-access-2ph8h\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.788303 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:45 crc kubenswrapper[4710]: I1009 09:07:45.788372 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/542f16e8-eda7-4525-ba86-cc890a369f30-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.337732 4710 generic.go:334] "Generic (PLEG): container finished" podID="542f16e8-eda7-4525-ba86-cc890a369f30" containerID="6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe" exitCode=0 Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.337767 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerDied","Data":"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe"} Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.337789 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tqk6t" event={"ID":"542f16e8-eda7-4525-ba86-cc890a369f30","Type":"ContainerDied","Data":"9a80fc6af46e6effa927c2acd6fb236094e8486370071fb7a2403171f5737e31"} Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.337806 4710 scope.go:117] "RemoveContainer" containerID="6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.337906 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tqk6t" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.351910 4710 scope.go:117] "RemoveContainer" containerID="2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.360711 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.364437 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tqk6t"] Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.375655 4710 scope.go:117] "RemoveContainer" containerID="4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.389200 4710 scope.go:117] "RemoveContainer" containerID="6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe" Oct 09 09:07:46 crc kubenswrapper[4710]: E1009 09:07:46.389574 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe\": container with ID starting with 6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe not found: ID does not exist" containerID="6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.389618 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe"} err="failed to get container status \"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe\": rpc error: code = NotFound desc = could not find container \"6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe\": container with ID starting with 6e239399b79e9d541524ee973c8b946fa7479e2e50c555f7a1985027b71b87fe not found: ID does not exist" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.389648 4710 scope.go:117] "RemoveContainer" containerID="2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c" Oct 09 09:07:46 crc kubenswrapper[4710]: E1009 09:07:46.389980 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c\": container with ID starting with 2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c not found: ID does not exist" containerID="2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.389999 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c"} err="failed to get container status \"2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c\": rpc error: code = NotFound desc = could not find container \"2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c\": container with ID starting with 2a5926109f5917dc40a03f1ff002f92b0b37d4f00ed553323641c65a12dcdf4c not found: ID does not exist" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.390014 4710 scope.go:117] "RemoveContainer" containerID="4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e" Oct 09 09:07:46 crc kubenswrapper[4710]: E1009 09:07:46.390274 4710 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e\": container with ID starting with 4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e not found: ID does not exist" containerID="4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.390289 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e"} err="failed to get container status \"4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e\": rpc error: code = NotFound desc = could not find container \"4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e\": container with ID starting with 4e2bfa3172f97e42d63bf6acac501be1a646fce6a1876403f3c5d7495b1d4e0e not found: ID does not exist" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.824009 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" path="/var/lib/kubelet/pods/01eaf904-9e41-4374-8fe8-ed5f5a4053ac/volumes" Oct 09 09:07:46 crc kubenswrapper[4710]: I1009 09:07:46.825468 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" path="/var/lib/kubelet/pods/542f16e8-eda7-4525-ba86-cc890a369f30/volumes" Oct 09 09:07:47 crc kubenswrapper[4710]: I1009 09:07:47.480606 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:07:47 crc kubenswrapper[4710]: I1009 09:07:47.868811 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.067536 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.067905 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7w7ww" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="registry-server" containerID="cri-o://0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58" gracePeriod=2 Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.334226 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.361596 4710 generic.go:334] "Generic (PLEG): container finished" podID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerID="0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58" exitCode=0 Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.361630 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerDied","Data":"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58"} Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.361656 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7w7ww" event={"ID":"b67ac216-99a7-41b5-9f03-65af85f1667e","Type":"ContainerDied","Data":"c04b1a94f3b74da05c4700375fb32833019c082e8ba9fb221d6a7db2f2f67e92"} Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.361672 4710 scope.go:117] "RemoveContainer" containerID="0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.361736 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7w7ww" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.376912 4710 scope.go:117] "RemoveContainer" containerID="9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.387519 4710 scope.go:117] "RemoveContainer" containerID="220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.408753 4710 scope.go:117] "RemoveContainer" containerID="0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58" Oct 09 09:07:50 crc kubenswrapper[4710]: E1009 09:07:50.409217 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58\": container with ID starting with 0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58 not found: ID does not exist" containerID="0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.409252 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58"} err="failed to get container status \"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58\": rpc error: code = NotFound desc = could not find container \"0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58\": container with ID starting with 0cc2e6d6a9aa91acdae18bae39d39cfc998a977118c215ed838e05d3dff99d58 not found: ID does not exist" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.409386 4710 scope.go:117] "RemoveContainer" containerID="9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836" Oct 09 09:07:50 crc kubenswrapper[4710]: E1009 09:07:50.410075 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836\": container with ID starting with 9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836 not found: ID does not exist" 
containerID="9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.410116 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836"} err="failed to get container status \"9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836\": rpc error: code = NotFound desc = could not find container \"9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836\": container with ID starting with 9dfc4fe7bebe1f9b28d4162f94709edc8416078a2c62f907ea6d891209c7a836 not found: ID does not exist" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.410135 4710 scope.go:117] "RemoveContainer" containerID="220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6" Oct 09 09:07:50 crc kubenswrapper[4710]: E1009 09:07:50.410647 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6\": container with ID starting with 220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6 not found: ID does not exist" containerID="220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.410672 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6"} err="failed to get container status \"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6\": rpc error: code = NotFound desc = could not find container \"220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6\": container with ID starting with 220038b488ebd0775fccb6ec39a897a75b79fc2782e5f5c55218c8a7ad97f0f6 not found: ID does not exist" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.445365 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities\") pod \"b67ac216-99a7-41b5-9f03-65af85f1667e\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.445415 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content\") pod \"b67ac216-99a7-41b5-9f03-65af85f1667e\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.445479 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66tmt\" (UniqueName: \"kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt\") pod \"b67ac216-99a7-41b5-9f03-65af85f1667e\" (UID: \"b67ac216-99a7-41b5-9f03-65af85f1667e\") " Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.446283 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities" (OuterVolumeSpecName: "utilities") pod "b67ac216-99a7-41b5-9f03-65af85f1667e" (UID: "b67ac216-99a7-41b5-9f03-65af85f1667e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.449993 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt" (OuterVolumeSpecName: "kube-api-access-66tmt") pod "b67ac216-99a7-41b5-9f03-65af85f1667e" (UID: "b67ac216-99a7-41b5-9f03-65af85f1667e"). InnerVolumeSpecName "kube-api-access-66tmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.485077 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b67ac216-99a7-41b5-9f03-65af85f1667e" (UID: "b67ac216-99a7-41b5-9f03-65af85f1667e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.546947 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.546980 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66tmt\" (UniqueName: \"kubernetes.io/projected/b67ac216-99a7-41b5-9f03-65af85f1667e-kube-api-access-66tmt\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.546995 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b67ac216-99a7-41b5-9f03-65af85f1667e-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.682264 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.684785 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7w7ww"] Oct 09 09:07:50 crc kubenswrapper[4710]: I1009 09:07:50.820031 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" path="/var/lib/kubelet/pods/b67ac216-99a7-41b5-9f03-65af85f1667e/volumes" Oct 09 09:08:05 crc kubenswrapper[4710]: I1009 09:08:05.546474 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:08:05 crc kubenswrapper[4710]: I1009 09:08:05.547080 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:08:05 crc kubenswrapper[4710]: I1009 09:08:05.547126 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:08:05 crc kubenswrapper[4710]: I1009 09:08:05.547695 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:08:05 crc kubenswrapper[4710]: I1009 09:08:05.547743 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a" gracePeriod=600 Oct 09 09:08:06 crc kubenswrapper[4710]: I1009 09:08:06.459718 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a" exitCode=0 Oct 09 09:08:06 crc kubenswrapper[4710]: I1009 09:08:06.459819 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a"} Oct 09 09:08:06 crc kubenswrapper[4710]: I1009 09:08:06.460020 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c"} Oct 09 09:08:08 crc kubenswrapper[4710]: I1009 09:08:08.493056 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.515338 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerName="oauth-openshift" containerID="cri-o://f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958" gracePeriod=15 Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.807337 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.838495 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7544d6d989-7x2xb"] Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.838971 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95960189-5bcf-4fcf-bd91-99ac89579dbe" containerName="pruner" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.838983 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="95960189-5bcf-4fcf-bd91-99ac89579dbe" containerName="pruner" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.838996 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839003 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839012 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerName="oauth-openshift" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839018 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerName="oauth-openshift" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839024 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839031 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839041 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839046 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839051 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839058 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839068 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839073 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839082 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839087 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839095 4710 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839099 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839105 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839110 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839121 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839127 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839134 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839139 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839147 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839152 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="extract-utilities" Oct 09 09:08:33 crc kubenswrapper[4710]: E1009 09:08:33.839157 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839221 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="extract-content" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839315 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b67ac216-99a7-41b5-9f03-65af85f1667e" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839325 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerName="oauth-openshift" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839332 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="01eaf904-9e41-4374-8fe8-ed5f5a4053ac" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839342 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="faced6b4-369b-4866-8fe7-e9c80f9fc52b" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839350 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="542f16e8-eda7-4525-ba86-cc890a369f30" containerName="registry-server" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839360 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="95960189-5bcf-4fcf-bd91-99ac89579dbe" containerName="pruner" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.839687 4710 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848299 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848346 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848391 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-error\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848417 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848588 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-session\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848662 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5xw9\" (UniqueName: \"kubernetes.io/projected/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-kube-api-access-b5xw9\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848729 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848876 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-service-ca\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848932 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-router-certs\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848952 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-policies\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.848977 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.849009 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-dir\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.849044 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.849114 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-login\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.856291 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7544d6d989-7x2xb"] Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950664 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" 
(UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950716 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2l4md\" (UniqueName: \"kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950779 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950822 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950840 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950871 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950903 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950926 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950963 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.950986 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951021 4710 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951050 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951093 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951117 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca\") pod \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\" (UID: \"21d2e430-51d2-41b4-89e6-4af6eceaf5d3\") " Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951338 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-login\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951382 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951408 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951452 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-error\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951487 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-ocp-branding-template\") pod 
\"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951516 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-session\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5xw9\" (UniqueName: \"kubernetes.io/projected/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-kube-api-access-b5xw9\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951564 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951601 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-service-ca\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951625 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-policies\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951641 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-router-certs\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951660 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951680 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-dir\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: 
\"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951702 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.951904 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.952593 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.952589 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.953226 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-policies\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.956884 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-router-certs\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.956999 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-login\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.957526 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.957771 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.957859 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.957936 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-audit-dir\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.958004 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). 
InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.959161 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md" (OuterVolumeSpecName: "kube-api-access-2l4md") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "kube-api-access-2l4md". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.959552 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.960134 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-error\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.960226 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-service-ca\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.960497 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.960597 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.960695 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.961209 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.961575 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.961792 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.961853 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.962485 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.962486 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.963050 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-v4-0-config-system-session\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.963322 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.963639 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "21d2e430-51d2-41b4-89e6-4af6eceaf5d3" (UID: "21d2e430-51d2-41b4-89e6-4af6eceaf5d3"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:33 crc kubenswrapper[4710]: I1009 09:08:33.966125 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5xw9\" (UniqueName: \"kubernetes.io/projected/53deeb6d-8e7f-49ad-aa5d-5dfa8091534e-kube-api-access-b5xw9\") pod \"oauth-openshift-7544d6d989-7x2xb\" (UID: \"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e\") " pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052414 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052585 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052662 4710 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052724 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052785 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052843 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052893 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052942 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.052999 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.053054 4710 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.053105 4710 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.053155 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.053211 4710 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.053280 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2l4md\" (UniqueName: \"kubernetes.io/projected/21d2e430-51d2-41b4-89e6-4af6eceaf5d3-kube-api-access-2l4md\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.163839 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.529303 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7544d6d989-7x2xb"] Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.580749 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" event={"ID":"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e","Type":"ContainerStarted","Data":"b6ac4936a84f36ae6c52860d4e0cde175140ec36c920478c0c957333fc734f6c"} Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.582586 4710 generic.go:334] "Generic (PLEG): container finished" podID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" containerID="f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958" exitCode=0 Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.582630 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" event={"ID":"21d2e430-51d2-41b4-89e6-4af6eceaf5d3","Type":"ContainerDied","Data":"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958"} Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.582657 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" event={"ID":"21d2e430-51d2-41b4-89e6-4af6eceaf5d3","Type":"ContainerDied","Data":"67f7f6a65f2d14cd8e3b71a1fa533aef07118526a3bb39710c0e4a24a17c00ec"} Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.582673 4710 scope.go:117] "RemoveContainer" containerID="f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.582717 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-7d642" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.600757 4710 scope.go:117] "RemoveContainer" containerID="f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958" Oct 09 09:08:34 crc kubenswrapper[4710]: E1009 09:08:34.601267 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958\": container with ID starting with f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958 not found: ID does not exist" containerID="f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.601308 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958"} err="failed to get container status \"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958\": rpc error: code = NotFound desc = could not find container \"f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958\": container with ID starting with f83ceb6d735c190a4a36f38c03f142bd7e842eae3159a1958c895572cf3d6958 not found: ID does not exist" Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.616671 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.618264 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-7d642"] Oct 09 09:08:34 crc kubenswrapper[4710]: I1009 09:08:34.822008 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21d2e430-51d2-41b4-89e6-4af6eceaf5d3" path="/var/lib/kubelet/pods/21d2e430-51d2-41b4-89e6-4af6eceaf5d3/volumes" Oct 09 09:08:35 crc kubenswrapper[4710]: I1009 09:08:35.591114 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" event={"ID":"53deeb6d-8e7f-49ad-aa5d-5dfa8091534e","Type":"ContainerStarted","Data":"6678c3966072a2db4179312465da0491605bccc5d60832ded7aa34369acb39e5"} Oct 09 09:08:35 crc kubenswrapper[4710]: I1009 09:08:35.591470 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:35 crc kubenswrapper[4710]: I1009 09:08:35.596695 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" Oct 09 09:08:35 crc kubenswrapper[4710]: I1009 09:08:35.609898 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7544d6d989-7x2xb" podStartSLOduration=27.609882207 podStartE2EDuration="27.609882207s" podCreationTimestamp="2025-10-09 09:08:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:08:35.607814924 +0000 UTC m=+239.097923321" watchObservedRunningTime="2025-10-09 09:08:35.609882207 +0000 UTC m=+239.099990604" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.473720 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.474558 4710 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/certified-operators-s8wd8" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="registry-server" containerID="cri-o://76f9289ae01963cc780168d410960301e260f0d8780236909f3a7547dcaba49e" gracePeriod=30 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.500789 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.500956 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q9t84" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="registry-server" containerID="cri-o://fbc679583112279e26b84efae67a345de0f3a026947200dc0c6ca908ff49af26" gracePeriod=30 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.514484 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.514703 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" containerName="marketplace-operator" containerID="cri-o://f0c56a7faa368cd4a10cb5973739a67a465e06b0ed25059fec2f0c6e927826ef" gracePeriod=30 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.528516 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.528659 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bjx77" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="registry-server" containerID="cri-o://8ebb4f4d6004dddb43f114514cf441c6a88e67a4914edda0843ef27531bf2455" gracePeriod=30 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.539697 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mf25c"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.541439 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.542989 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.543142 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l7l4g" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="registry-server" containerID="cri-o://2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd" gracePeriod=30 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.562591 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mf25c"] Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.660754 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.661234 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grbpc\" (UniqueName: \"kubernetes.io/projected/0f52acde-7961-4866-8e50-2d6839085e4d-kube-api-access-grbpc\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.661329 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.685401 4710 generic.go:334] "Generic (PLEG): container finished" podID="db9cdb3d-9888-46e4-a5db-710577557a80" containerID="fbc679583112279e26b84efae67a345de0f3a026947200dc0c6ca908ff49af26" exitCode=0 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.685500 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerDied","Data":"fbc679583112279e26b84efae67a345de0f3a026947200dc0c6ca908ff49af26"} Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.690894 4710 generic.go:334] "Generic (PLEG): container finished" podID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerID="8ebb4f4d6004dddb43f114514cf441c6a88e67a4914edda0843ef27531bf2455" exitCode=0 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.690960 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerDied","Data":"8ebb4f4d6004dddb43f114514cf441c6a88e67a4914edda0843ef27531bf2455"} Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.701928 4710 generic.go:334] "Generic (PLEG): container finished" podID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" 
containerID="f0c56a7faa368cd4a10cb5973739a67a465e06b0ed25059fec2f0c6e927826ef" exitCode=0 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.701978 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" event={"ID":"d4f9a31b-650d-46d5-b8d3-9176e2048beb","Type":"ContainerDied","Data":"f0c56a7faa368cd4a10cb5973739a67a465e06b0ed25059fec2f0c6e927826ef"} Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.708100 4710 generic.go:334] "Generic (PLEG): container finished" podID="e73b2237-e967-49cc-9368-c670f2749a60" containerID="76f9289ae01963cc780168d410960301e260f0d8780236909f3a7547dcaba49e" exitCode=0 Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.708145 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerDied","Data":"76f9289ae01963cc780168d410960301e260f0d8780236909f3a7547dcaba49e"} Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.762920 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.763033 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grbpc\" (UniqueName: \"kubernetes.io/projected/0f52acde-7961-4866-8e50-2d6839085e4d-kube-api-access-grbpc\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.763073 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.765671 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.769046 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/0f52acde-7961-4866-8e50-2d6839085e4d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.778866 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grbpc\" (UniqueName: \"kubernetes.io/projected/0f52acde-7961-4866-8e50-2d6839085e4d-kube-api-access-grbpc\") pod \"marketplace-operator-79b997595-mf25c\" (UID: \"0f52acde-7961-4866-8e50-2d6839085e4d\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.810751 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.879569 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.904660 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.969501 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974533 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities\") pod \"db9cdb3d-9888-46e4-a5db-710577557a80\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974645 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rvzv\" (UniqueName: \"kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv\") pod \"e73b2237-e967-49cc-9368-c670f2749a60\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974703 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities\") pod \"e73b2237-e967-49cc-9368-c670f2749a60\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974724 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2z2x\" (UniqueName: \"kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x\") pod \"db9cdb3d-9888-46e4-a5db-710577557a80\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974761 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content\") pod \"db9cdb3d-9888-46e4-a5db-710577557a80\" (UID: \"db9cdb3d-9888-46e4-a5db-710577557a80\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.974813 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content\") pod \"e73b2237-e967-49cc-9368-c670f2749a60\" (UID: \"e73b2237-e967-49cc-9368-c670f2749a60\") " Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.975415 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities" (OuterVolumeSpecName: "utilities") pod "db9cdb3d-9888-46e4-a5db-710577557a80" (UID: "db9cdb3d-9888-46e4-a5db-710577557a80"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.976034 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities" (OuterVolumeSpecName: "utilities") pod "e73b2237-e967-49cc-9368-c670f2749a60" (UID: "e73b2237-e967-49cc-9368-c670f2749a60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.976632 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.976648 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.999472 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x" (OuterVolumeSpecName: "kube-api-access-c2z2x") pod "db9cdb3d-9888-46e4-a5db-710577557a80" (UID: "db9cdb3d-9888-46e4-a5db-710577557a80"). InnerVolumeSpecName "kube-api-access-c2z2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:50 crc kubenswrapper[4710]: I1009 09:08:50.999533 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv" (OuterVolumeSpecName: "kube-api-access-8rvzv") pod "e73b2237-e967-49cc-9368-c670f2749a60" (UID: "e73b2237-e967-49cc-9368-c670f2749a60"). InnerVolumeSpecName "kube-api-access-8rvzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.007143 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.019286 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e73b2237-e967-49cc-9368-c670f2749a60" (UID: "e73b2237-e967-49cc-9368-c670f2749a60"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.040853 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.055398 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db9cdb3d-9888-46e4-a5db-710577557a80" (UID: "db9cdb3d-9888-46e4-a5db-710577557a80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078013 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities\") pod \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078223 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25tpl\" (UniqueName: \"kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl\") pod \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078265 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") pod \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078331 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content\") pod \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\" (UID: \"5e5a0c99-47a8-46dd-b869-511a979f4b8c\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078425 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics\") pod \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078484 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4jzh\" (UniqueName: \"kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh\") pod \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\" (UID: \"d4f9a31b-650d-46d5-b8d3-9176e2048beb\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.078927 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "d4f9a31b-650d-46d5-b8d3-9176e2048beb" (UID: "d4f9a31b-650d-46d5-b8d3-9176e2048beb"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079141 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rvzv\" (UniqueName: \"kubernetes.io/projected/e73b2237-e967-49cc-9368-c670f2749a60-kube-api-access-8rvzv\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079183 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2z2x\" (UniqueName: \"kubernetes.io/projected/db9cdb3d-9888-46e4-a5db-710577557a80-kube-api-access-c2z2x\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079196 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db9cdb3d-9888-46e4-a5db-710577557a80-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079207 4710 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079216 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e73b2237-e967-49cc-9368-c670f2749a60-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.079266 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities" (OuterVolumeSpecName: "utilities") pod "5e5a0c99-47a8-46dd-b869-511a979f4b8c" (UID: "5e5a0c99-47a8-46dd-b869-511a979f4b8c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.082273 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl" (OuterVolumeSpecName: "kube-api-access-25tpl") pod "5e5a0c99-47a8-46dd-b869-511a979f4b8c" (UID: "5e5a0c99-47a8-46dd-b869-511a979f4b8c"). InnerVolumeSpecName "kube-api-access-25tpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.083578 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "d4f9a31b-650d-46d5-b8d3-9176e2048beb" (UID: "d4f9a31b-650d-46d5-b8d3-9176e2048beb"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.089797 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh" (OuterVolumeSpecName: "kube-api-access-t4jzh") pod "d4f9a31b-650d-46d5-b8d3-9176e2048beb" (UID: "d4f9a31b-650d-46d5-b8d3-9176e2048beb"). InnerVolumeSpecName "kube-api-access-t4jzh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.091224 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5e5a0c99-47a8-46dd-b869-511a979f4b8c" (UID: "5e5a0c99-47a8-46dd-b869-511a979f4b8c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.180137 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fccbb\" (UniqueName: \"kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb\") pod \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.180307 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content\") pod \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.180558 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities\") pod \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\" (UID: \"c5e7c194-8f27-4279-a34b-d8a0a94bdb03\") " Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.180974 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.180996 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25tpl\" (UniqueName: \"kubernetes.io/projected/5e5a0c99-47a8-46dd-b869-511a979f4b8c-kube-api-access-25tpl\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.181007 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5e5a0c99-47a8-46dd-b869-511a979f4b8c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.181018 4710 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d4f9a31b-650d-46d5-b8d3-9176e2048beb-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.181028 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4jzh\" (UniqueName: \"kubernetes.io/projected/d4f9a31b-650d-46d5-b8d3-9176e2048beb-kube-api-access-t4jzh\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.181475 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities" (OuterVolumeSpecName: "utilities") pod "c5e7c194-8f27-4279-a34b-d8a0a94bdb03" (UID: "c5e7c194-8f27-4279-a34b-d8a0a94bdb03"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.186031 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb" (OuterVolumeSpecName: "kube-api-access-fccbb") pod "c5e7c194-8f27-4279-a34b-d8a0a94bdb03" (UID: "c5e7c194-8f27-4279-a34b-d8a0a94bdb03"). InnerVolumeSpecName "kube-api-access-fccbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.233190 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mf25c"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.262742 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c5e7c194-8f27-4279-a34b-d8a0a94bdb03" (UID: "c5e7c194-8f27-4279-a34b-d8a0a94bdb03"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.282659 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.282695 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.282722 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fccbb\" (UniqueName: \"kubernetes.io/projected/c5e7c194-8f27-4279-a34b-d8a0a94bdb03-kube-api-access-fccbb\") on node \"crc\" DevicePath \"\"" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.714390 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s8wd8" event={"ID":"e73b2237-e967-49cc-9368-c670f2749a60","Type":"ContainerDied","Data":"aaa5379b750591c35e408cfaa0e982f01241ab634298b7b3548d58eabfd406de"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.714697 4710 scope.go:117] "RemoveContainer" containerID="76f9289ae01963cc780168d410960301e260f0d8780236909f3a7547dcaba49e" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.715414 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s8wd8" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.717637 4710 generic.go:334] "Generic (PLEG): container finished" podID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerID="2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd" exitCode=0 Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.717743 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l7l4g" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.718006 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerDied","Data":"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.718066 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l7l4g" event={"ID":"c5e7c194-8f27-4279-a34b-d8a0a94bdb03","Type":"ContainerDied","Data":"23c4740dcbca2fbdfa699968458d28ce944df17887322deaa1e8d59420eba6bd"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.725900 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q9t84" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.725950 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q9t84" event={"ID":"db9cdb3d-9888-46e4-a5db-710577557a80","Type":"ContainerDied","Data":"581a7b5b670186944e56e7eb5293981791f391b020b2e0ce708da812d04f4993"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.728978 4710 scope.go:117] "RemoveContainer" containerID="008b594ec08a51f16fb912c232c011d45627cd6425aa9efa0b51de93ed20d73f" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.730060 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bjx77" event={"ID":"5e5a0c99-47a8-46dd-b869-511a979f4b8c","Type":"ContainerDied","Data":"37619aa2f3634d611dad8aebeef7327e23485525996c5439ec7b9d51eb0db4c0"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.730158 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bjx77" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.735472 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.735478 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtd" event={"ID":"d4f9a31b-650d-46d5-b8d3-9176e2048beb","Type":"ContainerDied","Data":"e6d43b453337060fdaf8d2cca67fdafba70a12ce88a71daead7da94e3c789dae"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.739943 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" event={"ID":"0f52acde-7961-4866-8e50-2d6839085e4d","Type":"ContainerStarted","Data":"ce8ed803cdd40f0b7820cf265d40bfa6c9bb2665a349f0a3ec7481c862ce5520"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.739977 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" event={"ID":"0f52acde-7961-4866-8e50-2d6839085e4d","Type":"ContainerStarted","Data":"f01c1209461a0d5e96611f315e690d10cf885e9a1dc4534d43c9fc81b6789998"} Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.740481 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.750503 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.754163 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s8wd8"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.761139 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.761151 4710 scope.go:117] "RemoveContainer" containerID="90150de71e4bf58f083ada524a239869821b08f7286d7cc16c88f213363cc518" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.784954 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mf25c" podStartSLOduration=1.784917324 podStartE2EDuration="1.784917324s" podCreationTimestamp="2025-10-09 09:08:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:08:51.772638324 +0000 UTC m=+255.262746720" watchObservedRunningTime="2025-10-09 09:08:51.784917324 +0000 UTC m=+255.275025721" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.788831 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.794969 4710 scope.go:117] "RemoveContainer" containerID="2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.795776 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l7l4g"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.804049 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.811530 4710 scope.go:117] "RemoveContainer" containerID="87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.812015 4710 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bjx77"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.824510 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.827870 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtd"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.832190 4710 scope.go:117] "RemoveContainer" containerID="b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.846102 4710 scope.go:117] "RemoveContainer" containerID="2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd" Oct 09 09:08:51 crc kubenswrapper[4710]: E1009 09:08:51.846722 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd\": container with ID starting with 2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd not found: ID does not exist" containerID="2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.846754 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd"} err="failed to get container status \"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd\": rpc error: code = NotFound desc = could not find container \"2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd\": container with ID starting with 2d27b34e227effb30108e30f18f2805fa7ca916fd93618aedbd6f57fd87b6afd not found: ID does not exist" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.846775 4710 scope.go:117] "RemoveContainer" containerID="87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.850629 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:08:51 crc kubenswrapper[4710]: E1009 09:08:51.850851 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37\": container with ID starting with 87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37 not found: ID does not exist" containerID="87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.850885 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37"} err="failed to get container status \"87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37\": rpc error: code = NotFound desc = could not find container \"87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37\": container with ID starting with 87cba35e4f75d080940d87be4c42b1d77dfae5fac093b4831e0421607118ed37 not found: ID does not exist" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.850910 4710 scope.go:117] "RemoveContainer" containerID="b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5" Oct 09 09:08:51 crc kubenswrapper[4710]: E1009 09:08:51.851133 4710 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5\": container with ID starting with b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5 not found: ID does not exist" containerID="b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.851157 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5"} err="failed to get container status \"b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5\": rpc error: code = NotFound desc = could not find container \"b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5\": container with ID starting with b6626dce19dbf4baed95399432fcc5f8b174267e8b56caa9b1131f7475e151e5 not found: ID does not exist" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.851174 4710 scope.go:117] "RemoveContainer" containerID="fbc679583112279e26b84efae67a345de0f3a026947200dc0c6ca908ff49af26" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.856227 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q9t84"] Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.864228 4710 scope.go:117] "RemoveContainer" containerID="7654e87089f03dbe68f4c532bd124315ae56dc93a9d41bd2b5c2ff1ca49bb5c1" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.882598 4710 scope.go:117] "RemoveContainer" containerID="b0380aa741e3056effb79810f424aabcb02b5baec0e213c44a7845d2a145ee3b" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.895712 4710 scope.go:117] "RemoveContainer" containerID="8ebb4f4d6004dddb43f114514cf441c6a88e67a4914edda0843ef27531bf2455" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.908021 4710 scope.go:117] "RemoveContainer" containerID="9cd990960171b195bf93481fa704d1dc5223ca99aef3ce3817a367e12e5982e4" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.919692 4710 scope.go:117] "RemoveContainer" containerID="8ab9a6c2cf5e7823006d380514d28b03659003cd3d1823aa387545547c5806b8" Oct 09 09:08:51 crc kubenswrapper[4710]: I1009 09:08:51.933200 4710 scope.go:117] "RemoveContainer" containerID="f0c56a7faa368cd4a10cb5973739a67a465e06b0ed25059fec2f0c6e927826ef" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689370 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rtjsg"] Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689587 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689604 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689617 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689623 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689631 4710 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" containerName="marketplace-operator" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689637 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" containerName="marketplace-operator" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689646 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689652 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689660 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689665 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689670 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689676 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689684 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689690 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689697 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689702 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689710 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689715 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689724 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689729 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689735 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689740 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="extract-utilities" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689746 4710 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689751 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="extract-content" Oct 09 09:08:52 crc kubenswrapper[4710]: E1009 09:08:52.689759 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689764 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689845 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689855 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e73b2237-e967-49cc-9368-c670f2749a60" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689866 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" containerName="marketplace-operator" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689872 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.689878 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" containerName="registry-server" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.690514 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.695822 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.703503 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtjsg"] Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.799449 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-utilities\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.799581 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-catalog-content\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.799610 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vct5c\" (UniqueName: \"kubernetes.io/projected/b6750787-1707-42c3-9e91-949cfcf33699-kube-api-access-vct5c\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.820190 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e5a0c99-47a8-46dd-b869-511a979f4b8c" path="/var/lib/kubelet/pods/5e5a0c99-47a8-46dd-b869-511a979f4b8c/volumes" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.820788 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5e7c194-8f27-4279-a34b-d8a0a94bdb03" path="/var/lib/kubelet/pods/c5e7c194-8f27-4279-a34b-d8a0a94bdb03/volumes" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.821370 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4f9a31b-650d-46d5-b8d3-9176e2048beb" path="/var/lib/kubelet/pods/d4f9a31b-650d-46d5-b8d3-9176e2048beb/volumes" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.822194 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db9cdb3d-9888-46e4-a5db-710577557a80" path="/var/lib/kubelet/pods/db9cdb3d-9888-46e4-a5db-710577557a80/volumes" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.822745 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e73b2237-e967-49cc-9368-c670f2749a60" path="/var/lib/kubelet/pods/e73b2237-e967-49cc-9368-c670f2749a60/volumes" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.892947 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97w7k"] Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.893833 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.899961 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.901730 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-utilities\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.901894 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-catalog-content\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.901944 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vct5c\" (UniqueName: \"kubernetes.io/projected/b6750787-1707-42c3-9e91-949cfcf33699-kube-api-access-vct5c\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.902912 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-catalog-content\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.903162 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6750787-1707-42c3-9e91-949cfcf33699-utilities\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.919083 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97w7k"] Oct 09 09:08:52 crc kubenswrapper[4710]: I1009 09:08:52.934718 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vct5c\" (UniqueName: \"kubernetes.io/projected/b6750787-1707-42c3-9e91-949cfcf33699-kube-api-access-vct5c\") pod \"redhat-marketplace-rtjsg\" (UID: \"b6750787-1707-42c3-9e91-949cfcf33699\") " pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.003258 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hg6q\" (UniqueName: \"kubernetes.io/projected/9cbe3f65-19a3-4145-94a8-4434ee92178f-kube-api-access-8hg6q\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.003367 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-utilities\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " 
pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.003404 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-catalog-content\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.016208 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.104387 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hg6q\" (UniqueName: \"kubernetes.io/projected/9cbe3f65-19a3-4145-94a8-4434ee92178f-kube-api-access-8hg6q\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.104677 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-utilities\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.104707 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-catalog-content\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.105370 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-catalog-content\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.105406 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cbe3f65-19a3-4145-94a8-4434ee92178f-utilities\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.126163 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hg6q\" (UniqueName: \"kubernetes.io/projected/9cbe3f65-19a3-4145-94a8-4434ee92178f-kube-api-access-8hg6q\") pod \"redhat-operators-97w7k\" (UID: \"9cbe3f65-19a3-4145-94a8-4434ee92178f\") " pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.196725 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rtjsg"] Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.225257 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.410183 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97w7k"] Oct 09 09:08:53 crc kubenswrapper[4710]: W1009 09:08:53.412809 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9cbe3f65_19a3_4145_94a8_4434ee92178f.slice/crio-55df25f3ff069ae8256a2a6f0c2ec6b38439556fd7e1f73ff36284de2cd75f9a WatchSource:0}: Error finding container 55df25f3ff069ae8256a2a6f0c2ec6b38439556fd7e1f73ff36284de2cd75f9a: Status 404 returned error can't find the container with id 55df25f3ff069ae8256a2a6f0c2ec6b38439556fd7e1f73ff36284de2cd75f9a Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.758399 4710 generic.go:334] "Generic (PLEG): container finished" podID="9cbe3f65-19a3-4145-94a8-4434ee92178f" containerID="d9bea37702f123b31f5af27d0d690f8bb3bacf9da540b4ed7adffcf3c021fed8" exitCode=0 Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.758501 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97w7k" event={"ID":"9cbe3f65-19a3-4145-94a8-4434ee92178f","Type":"ContainerDied","Data":"d9bea37702f123b31f5af27d0d690f8bb3bacf9da540b4ed7adffcf3c021fed8"} Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.758531 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97w7k" event={"ID":"9cbe3f65-19a3-4145-94a8-4434ee92178f","Type":"ContainerStarted","Data":"55df25f3ff069ae8256a2a6f0c2ec6b38439556fd7e1f73ff36284de2cd75f9a"} Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.761199 4710 generic.go:334] "Generic (PLEG): container finished" podID="b6750787-1707-42c3-9e91-949cfcf33699" containerID="e1428936f521906914c52f8df2da670a52c297bd198de9632c224e9b3f4a8572" exitCode=0 Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.761284 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtjsg" event={"ID":"b6750787-1707-42c3-9e91-949cfcf33699","Type":"ContainerDied","Data":"e1428936f521906914c52f8df2da670a52c297bd198de9632c224e9b3f4a8572"} Oct 09 09:08:53 crc kubenswrapper[4710]: I1009 09:08:53.761347 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtjsg" event={"ID":"b6750787-1707-42c3-9e91-949cfcf33699","Type":"ContainerStarted","Data":"69f700dccbbea7bc9925580c1e3f7cd3438a98aa62a49eb5764827ea993717a2"} Oct 09 09:08:54 crc kubenswrapper[4710]: I1009 09:08:54.772344 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97w7k" event={"ID":"9cbe3f65-19a3-4145-94a8-4434ee92178f","Type":"ContainerStarted","Data":"8a14cc4a10b190bf61886b6380fb8eb442201e74742d3e49ef30f1735844d478"} Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.095325 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vqq8h"] Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.096942 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.099613 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.103608 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqq8h"] Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.233970 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ddzb\" (UniqueName: \"kubernetes.io/projected/19faa1c0-84d5-4b45-969b-c9524eee6e56-kube-api-access-5ddzb\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.234511 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-utilities\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.234623 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-catalog-content\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.293007 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gfskr"] Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.295060 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.299458 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.302724 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gfskr"] Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.336334 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ddzb\" (UniqueName: \"kubernetes.io/projected/19faa1c0-84d5-4b45-969b-c9524eee6e56-kube-api-access-5ddzb\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.336401 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-utilities\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.336511 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-catalog-content\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.337019 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-catalog-content\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.337660 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/19faa1c0-84d5-4b45-969b-c9524eee6e56-utilities\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.353869 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ddzb\" (UniqueName: \"kubernetes.io/projected/19faa1c0-84d5-4b45-969b-c9524eee6e56-kube-api-access-5ddzb\") pod \"community-operators-vqq8h\" (UID: \"19faa1c0-84d5-4b45-969b-c9524eee6e56\") " pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.421683 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.437881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-catalog-content\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.437948 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-utilities\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.437984 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgv9t\" (UniqueName: \"kubernetes.io/projected/f63018af-034e-4f0a-ab7e-fd508ba8bd25-kube-api-access-sgv9t\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.539776 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-utilities\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.540010 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgv9t\" (UniqueName: \"kubernetes.io/projected/f63018af-034e-4f0a-ab7e-fd508ba8bd25-kube-api-access-sgv9t\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.540106 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-catalog-content\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.540600 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-catalog-content\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.540680 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f63018af-034e-4f0a-ab7e-fd508ba8bd25-utilities\") pod \"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.558943 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgv9t\" (UniqueName: \"kubernetes.io/projected/f63018af-034e-4f0a-ab7e-fd508ba8bd25-kube-api-access-sgv9t\") pod 
\"certified-operators-gfskr\" (UID: \"f63018af-034e-4f0a-ab7e-fd508ba8bd25\") " pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.609988 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.612831 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqq8h"] Oct 09 09:08:55 crc kubenswrapper[4710]: W1009 09:08:55.621546 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19faa1c0_84d5_4b45_969b_c9524eee6e56.slice/crio-aeee72c56f8cce8b0af9c2b74da4674f556c33c9347cad1dc1b3241267bec236 WatchSource:0}: Error finding container aeee72c56f8cce8b0af9c2b74da4674f556c33c9347cad1dc1b3241267bec236: Status 404 returned error can't find the container with id aeee72c56f8cce8b0af9c2b74da4674f556c33c9347cad1dc1b3241267bec236 Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.780399 4710 generic.go:334] "Generic (PLEG): container finished" podID="19faa1c0-84d5-4b45-969b-c9524eee6e56" containerID="240af26d460f4a7a7789b8abb738ebf8e905080cc9e950e0fe7e3fd4b88302b3" exitCode=0 Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.780515 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqq8h" event={"ID":"19faa1c0-84d5-4b45-969b-c9524eee6e56","Type":"ContainerDied","Data":"240af26d460f4a7a7789b8abb738ebf8e905080cc9e950e0fe7e3fd4b88302b3"} Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.780583 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqq8h" event={"ID":"19faa1c0-84d5-4b45-969b-c9524eee6e56","Type":"ContainerStarted","Data":"aeee72c56f8cce8b0af9c2b74da4674f556c33c9347cad1dc1b3241267bec236"} Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.785195 4710 generic.go:334] "Generic (PLEG): container finished" podID="9cbe3f65-19a3-4145-94a8-4434ee92178f" containerID="8a14cc4a10b190bf61886b6380fb8eb442201e74742d3e49ef30f1735844d478" exitCode=0 Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.785256 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97w7k" event={"ID":"9cbe3f65-19a3-4145-94a8-4434ee92178f","Type":"ContainerDied","Data":"8a14cc4a10b190bf61886b6380fb8eb442201e74742d3e49ef30f1735844d478"} Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.788455 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gfskr"] Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.797255 4710 generic.go:334] "Generic (PLEG): container finished" podID="b6750787-1707-42c3-9e91-949cfcf33699" containerID="d600232c359b989e367babb093271ff4ad5431030ac0349f9c21783dffb24876" exitCode=0 Oct 09 09:08:55 crc kubenswrapper[4710]: I1009 09:08:55.797333 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtjsg" event={"ID":"b6750787-1707-42c3-9e91-949cfcf33699","Type":"ContainerDied","Data":"d600232c359b989e367babb093271ff4ad5431030ac0349f9c21783dffb24876"} Oct 09 09:08:55 crc kubenswrapper[4710]: W1009 09:08:55.800761 4710 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf63018af_034e_4f0a_ab7e_fd508ba8bd25.slice/crio-e92284d19a33ef9c3246e761911345759a23e531fabbc09cf3d9a47ecda38bf7 WatchSource:0}: Error finding container e92284d19a33ef9c3246e761911345759a23e531fabbc09cf3d9a47ecda38bf7: Status 404 returned error can't find the container with id e92284d19a33ef9c3246e761911345759a23e531fabbc09cf3d9a47ecda38bf7 Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.805790 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97w7k" event={"ID":"9cbe3f65-19a3-4145-94a8-4434ee92178f","Type":"ContainerStarted","Data":"fb849c6019f4a534161b86386279ca16fdb2a4d99838fd2e15bf2034f0f69c6d"} Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.809424 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rtjsg" event={"ID":"b6750787-1707-42c3-9e91-949cfcf33699","Type":"ContainerStarted","Data":"63de3232b1fa317321eebb98d423788e7c2fccf20c6ecce7d896432c69d3779a"} Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.810806 4710 generic.go:334] "Generic (PLEG): container finished" podID="f63018af-034e-4f0a-ab7e-fd508ba8bd25" containerID="27bd4c045db5b0bce2e27b98bd296da3462b8accc944c65a5415854407912c15" exitCode=0 Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.810879 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gfskr" event={"ID":"f63018af-034e-4f0a-ab7e-fd508ba8bd25","Type":"ContainerDied","Data":"27bd4c045db5b0bce2e27b98bd296da3462b8accc944c65a5415854407912c15"} Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.810906 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gfskr" event={"ID":"f63018af-034e-4f0a-ab7e-fd508ba8bd25","Type":"ContainerStarted","Data":"e92284d19a33ef9c3246e761911345759a23e531fabbc09cf3d9a47ecda38bf7"} Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.823078 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqq8h" event={"ID":"19faa1c0-84d5-4b45-969b-c9524eee6e56","Type":"ContainerStarted","Data":"e08af71aacc52e0acce975714df3bbec8f1dcda52a4e6d4931dcabd7c3672881"} Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.829624 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-97w7k" podStartSLOduration=2.116156272 podStartE2EDuration="4.829605305s" podCreationTimestamp="2025-10-09 09:08:52 +0000 UTC" firstStartedPulling="2025-10-09 09:08:53.760388577 +0000 UTC m=+257.250496973" lastFinishedPulling="2025-10-09 09:08:56.473837609 +0000 UTC m=+259.963946006" observedRunningTime="2025-10-09 09:08:56.826075639 +0000 UTC m=+260.316184036" watchObservedRunningTime="2025-10-09 09:08:56.829605305 +0000 UTC m=+260.319713703" Oct 09 09:08:56 crc kubenswrapper[4710]: I1009 09:08:56.884737 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rtjsg" podStartSLOduration=2.205551499 podStartE2EDuration="4.884706276s" podCreationTimestamp="2025-10-09 09:08:52 +0000 UTC" firstStartedPulling="2025-10-09 09:08:53.764461509 +0000 UTC m=+257.254569905" lastFinishedPulling="2025-10-09 09:08:56.443616285 +0000 UTC m=+259.933724682" observedRunningTime="2025-10-09 09:08:56.882511572 +0000 UTC m=+260.372619969" watchObservedRunningTime="2025-10-09 09:08:56.884706276 +0000 UTC m=+260.374814673" Oct 09 09:08:57 
crc kubenswrapper[4710]: I1009 09:08:57.827657 4710 generic.go:334] "Generic (PLEG): container finished" podID="19faa1c0-84d5-4b45-969b-c9524eee6e56" containerID="e08af71aacc52e0acce975714df3bbec8f1dcda52a4e6d4931dcabd7c3672881" exitCode=0 Oct 09 09:08:57 crc kubenswrapper[4710]: I1009 09:08:57.827717 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqq8h" event={"ID":"19faa1c0-84d5-4b45-969b-c9524eee6e56","Type":"ContainerDied","Data":"e08af71aacc52e0acce975714df3bbec8f1dcda52a4e6d4931dcabd7c3672881"} Oct 09 09:08:58 crc kubenswrapper[4710]: I1009 09:08:58.857724 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqq8h" event={"ID":"19faa1c0-84d5-4b45-969b-c9524eee6e56","Type":"ContainerStarted","Data":"3e51ce8e19e1e4b29ecbf035d5bf287500e7edc8b0589d64930267a22723419f"} Oct 09 09:08:58 crc kubenswrapper[4710]: I1009 09:08:58.865934 4710 generic.go:334] "Generic (PLEG): container finished" podID="f63018af-034e-4f0a-ab7e-fd508ba8bd25" containerID="0ae5bb4cd11c55dcdc2d7041b0b0f18c427f70e278ba8a8334fec7bdbf53475b" exitCode=0 Oct 09 09:08:58 crc kubenswrapper[4710]: I1009 09:08:58.866075 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gfskr" event={"ID":"f63018af-034e-4f0a-ab7e-fd508ba8bd25","Type":"ContainerDied","Data":"0ae5bb4cd11c55dcdc2d7041b0b0f18c427f70e278ba8a8334fec7bdbf53475b"} Oct 09 09:08:58 crc kubenswrapper[4710]: I1009 09:08:58.879583 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vqq8h" podStartSLOduration=1.119405866 podStartE2EDuration="3.879564392s" podCreationTimestamp="2025-10-09 09:08:55 +0000 UTC" firstStartedPulling="2025-10-09 09:08:55.785196152 +0000 UTC m=+259.275304550" lastFinishedPulling="2025-10-09 09:08:58.545354679 +0000 UTC m=+262.035463076" observedRunningTime="2025-10-09 09:08:58.878758039 +0000 UTC m=+262.368866436" watchObservedRunningTime="2025-10-09 09:08:58.879564392 +0000 UTC m=+262.369672789" Oct 09 09:08:59 crc kubenswrapper[4710]: I1009 09:08:59.873539 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gfskr" event={"ID":"f63018af-034e-4f0a-ab7e-fd508ba8bd25","Type":"ContainerStarted","Data":"30c10dd93e41ec2b140b8217af2ade2ff92ecb6df819ceedc94ff5d5b17daf6c"} Oct 09 09:08:59 crc kubenswrapper[4710]: I1009 09:08:59.901291 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gfskr" podStartSLOduration=2.367660046 podStartE2EDuration="4.901272568s" podCreationTimestamp="2025-10-09 09:08:55 +0000 UTC" firstStartedPulling="2025-10-09 09:08:56.81234622 +0000 UTC m=+260.302454618" lastFinishedPulling="2025-10-09 09:08:59.345958743 +0000 UTC m=+262.836067140" observedRunningTime="2025-10-09 09:08:59.898205824 +0000 UTC m=+263.388314221" watchObservedRunningTime="2025-10-09 09:08:59.901272568 +0000 UTC m=+263.391380965" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.017367 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.018773 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.066662 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.225850 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.226408 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.256557 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.937839 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rtjsg" Oct 09 09:09:03 crc kubenswrapper[4710]: I1009 09:09:03.940361 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97w7k" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.422621 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.423197 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.458868 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.611514 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.611594 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.652127 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.943155 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gfskr" Oct 09 09:09:05 crc kubenswrapper[4710]: I1009 09:09:05.943217 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vqq8h" Oct 09 09:10:05 crc kubenswrapper[4710]: I1009 09:10:05.546344 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:10:05 crc kubenswrapper[4710]: I1009 09:10:05.547279 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:10:35 crc kubenswrapper[4710]: I1009 09:10:35.546419 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:10:35 crc kubenswrapper[4710]: I1009 09:10:35.547086 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:11:05 crc kubenswrapper[4710]: I1009 09:11:05.546071 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:11:05 crc kubenswrapper[4710]: I1009 09:11:05.546764 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:11:05 crc kubenswrapper[4710]: I1009 09:11:05.546814 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:11:05 crc kubenswrapper[4710]: I1009 09:11:05.547481 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:11:05 crc kubenswrapper[4710]: I1009 09:11:05.547533 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c" gracePeriod=600 Oct 09 09:11:06 crc kubenswrapper[4710]: I1009 09:11:06.479774 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c" exitCode=0 Oct 09 09:11:06 crc kubenswrapper[4710]: I1009 09:11:06.479859 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c"} Oct 09 09:11:06 crc kubenswrapper[4710]: I1009 09:11:06.480159 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36"} Oct 09 09:11:06 crc kubenswrapper[4710]: I1009 09:11:06.480187 4710 scope.go:117] "RemoveContainer" containerID="003fab8ff82f188ec929bd9df03c508ac8532933c2e0304f29ba4975eeea6b4a" Oct 09 09:11:32 crc kubenswrapper[4710]: I1009 09:11:32.998829 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-image-registry/image-registry-66df7c8f76-whtj5"] Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:32.999886 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.010752 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-whtj5"] Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186277 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-tls\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186566 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186695 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-certificates\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186793 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k4b4\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-kube-api-access-5k4b4\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186879 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-bound-sa-token\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.186976 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.187040 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc 
kubenswrapper[4710]: I1009 09:11:33.187087 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-trusted-ca\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.212171 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288610 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-bound-sa-token\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288711 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288753 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288783 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-trusted-ca\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288821 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-tls\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288934 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-certificates\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.288969 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k4b4\" (UniqueName: 
\"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-kube-api-access-5k4b4\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.289884 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.290595 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-trusted-ca\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.291195 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-certificates\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.295748 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.296627 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-registry-tls\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.305087 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k4b4\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-kube-api-access-5k4b4\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.305862 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1f5b5aef-3f3e-45bf-9826-646c1d96b61e-bound-sa-token\") pod \"image-registry-66df7c8f76-whtj5\" (UID: \"1f5b5aef-3f3e-45bf-9826-646c1d96b61e\") " pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.316896 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.488949 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-whtj5"] Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.622409 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" event={"ID":"1f5b5aef-3f3e-45bf-9826-646c1d96b61e","Type":"ContainerStarted","Data":"194f32814f5d8e32dceedfde1be97c4e71eac7fc76c6f7bac36e21f26d92b20d"} Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.622689 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" event={"ID":"1f5b5aef-3f3e-45bf-9826-646c1d96b61e","Type":"ContainerStarted","Data":"0c071d664e4ef3649539bfa021092116608de67a34c58ef4820ab28d2dcb4628"} Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.622824 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:33 crc kubenswrapper[4710]: I1009 09:11:33.640389 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" podStartSLOduration=1.640360324 podStartE2EDuration="1.640360324s" podCreationTimestamp="2025-10-09 09:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:11:33.636714079 +0000 UTC m=+417.126822475" watchObservedRunningTime="2025-10-09 09:11:33.640360324 +0000 UTC m=+417.130468721" Oct 09 09:11:53 crc kubenswrapper[4710]: I1009 09:11:53.323001 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-whtj5" Oct 09 09:11:53 crc kubenswrapper[4710]: I1009 09:11:53.365223 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.392403 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" podUID="51644181-d79b-4704-873f-d3c13740f656" containerName="registry" containerID="cri-o://01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543" gracePeriod=30 Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.658986 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.832583 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.832633 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.832657 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.832697 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.832721 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.833383 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.833447 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.833675 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.833728 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.833789 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5ddr\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.834025 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.834042 4710 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51644181-d79b-4704-873f-d3c13740f656-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.839663 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: E1009 09:12:18.839743 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:51644181-d79b-4704-873f-d3c13740f656 nodeName:}" failed. No retries permitted until 2025-10-09 09:12:19.339721333 +0000 UTC m=+462.829829730 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "registry-storage" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656") : kubernetes.io/csi: Unmounter.TearDownAt failed: rpc error: code = Unknown desc = check target path: could not get consistent content of /proc/mounts after 3 attempts Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.840293 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.840481 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr" (OuterVolumeSpecName: "kube-api-access-v5ddr") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "kube-api-access-v5ddr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.840693 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.849207 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.897777 4710 generic.go:334] "Generic (PLEG): container finished" podID="51644181-d79b-4704-873f-d3c13740f656" containerID="01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543" exitCode=0 Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.897815 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" event={"ID":"51644181-d79b-4704-873f-d3c13740f656","Type":"ContainerDied","Data":"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543"} Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.897843 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" event={"ID":"51644181-d79b-4704-873f-d3c13740f656","Type":"ContainerDied","Data":"14decf36ae39e3fdc83a2d0ce358a1e15e5b6a70afc7f28c714e761080d8883f"} Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.897864 4710 scope.go:117] "RemoveContainer" containerID="01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.897980 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l9bsw" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.911632 4710 scope.go:117] "RemoveContainer" containerID="01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543" Oct 09 09:12:18 crc kubenswrapper[4710]: E1009 09:12:18.911895 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543\": container with ID starting with 01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543 not found: ID does not exist" containerID="01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.911924 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543"} err="failed to get container status \"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543\": rpc error: code = NotFound desc = could not find container \"01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543\": container with ID starting with 01d132d91673f04ddecf36917250d44ce4b67af0cbd41706f4afd7596c5b6543 not found: ID does not exist" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.935500 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5ddr\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-kube-api-access-v5ddr\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.935518 4710 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51644181-d79b-4704-873f-d3c13740f656-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.935528 4710 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.935537 4710 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51644181-d79b-4704-873f-d3c13740f656-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:18 crc kubenswrapper[4710]: I1009 09:12:18.935545 4710 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51644181-d79b-4704-873f-d3c13740f656-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 09 09:12:19 crc kubenswrapper[4710]: I1009 09:12:19.340590 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"51644181-d79b-4704-873f-d3c13740f656\" (UID: \"51644181-d79b-4704-873f-d3c13740f656\") " Oct 09 09:12:19 crc kubenswrapper[4710]: I1009 09:12:19.347321 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "51644181-d79b-4704-873f-d3c13740f656" (UID: "51644181-d79b-4704-873f-d3c13740f656"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 09 09:12:19 crc kubenswrapper[4710]: I1009 09:12:19.431594 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:12:19 crc kubenswrapper[4710]: I1009 09:12:19.434065 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l9bsw"] Oct 09 09:12:20 crc kubenswrapper[4710]: I1009 09:12:20.821851 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51644181-d79b-4704-873f-d3c13740f656" path="/var/lib/kubelet/pods/51644181-d79b-4704-873f-d3c13740f656/volumes" Oct 09 09:13:05 crc kubenswrapper[4710]: I1009 09:13:05.545875 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:13:05 crc kubenswrapper[4710]: I1009 09:13:05.546273 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:13:35 crc kubenswrapper[4710]: I1009 09:13:35.546196 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:13:35 crc kubenswrapper[4710]: I1009 09:13:35.546677 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.371557 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dx5ll"] Oct 09 09:13:48 crc kubenswrapper[4710]: E1009 09:13:48.372127 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51644181-d79b-4704-873f-d3c13740f656" containerName="registry" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.372140 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="51644181-d79b-4704-873f-d3c13740f656" containerName="registry" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.372237 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="51644181-d79b-4704-873f-d3c13740f656" containerName="registry" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.372561 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.384355 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7mwtc"] Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.387781 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.389043 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.391996 4710 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-hhlkc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.402294 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7mwtc"] Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.402379 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7mwtc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.404314 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-hgq5g"] Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.404580 4710 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-ttkvc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.404934 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.405638 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dx5ll"] Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.408817 4710 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-nfvns" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.412483 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-hgq5g"] Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.494186 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb2vj\" (UniqueName: \"kubernetes.io/projected/1631e6bf-cf27-4c97-ab40-8b2170648070-kube-api-access-hb2vj\") pod \"cert-manager-cainjector-7f985d654d-dx5ll\" (UID: \"1631e6bf-cf27-4c97-ab40-8b2170648070\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.494238 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knpmp\" (UniqueName: \"kubernetes.io/projected/4a0738a0-347d-48d6-a47e-52e13d52664d-kube-api-access-knpmp\") pod \"cert-manager-5b446d88c5-7mwtc\" (UID: \"4a0738a0-347d-48d6-a47e-52e13d52664d\") " pod="cert-manager/cert-manager-5b446d88c5-7mwtc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.494279 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7hxm\" (UniqueName: \"kubernetes.io/projected/878c656c-1bab-4b97-8267-910f8890946a-kube-api-access-g7hxm\") pod \"cert-manager-webhook-5655c58dd6-hgq5g\" (UID: \"878c656c-1bab-4b97-8267-910f8890946a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:48 
crc kubenswrapper[4710]: I1009 09:13:48.595688 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb2vj\" (UniqueName: \"kubernetes.io/projected/1631e6bf-cf27-4c97-ab40-8b2170648070-kube-api-access-hb2vj\") pod \"cert-manager-cainjector-7f985d654d-dx5ll\" (UID: \"1631e6bf-cf27-4c97-ab40-8b2170648070\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.595735 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knpmp\" (UniqueName: \"kubernetes.io/projected/4a0738a0-347d-48d6-a47e-52e13d52664d-kube-api-access-knpmp\") pod \"cert-manager-5b446d88c5-7mwtc\" (UID: \"4a0738a0-347d-48d6-a47e-52e13d52664d\") " pod="cert-manager/cert-manager-5b446d88c5-7mwtc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.595769 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7hxm\" (UniqueName: \"kubernetes.io/projected/878c656c-1bab-4b97-8267-910f8890946a-kube-api-access-g7hxm\") pod \"cert-manager-webhook-5655c58dd6-hgq5g\" (UID: \"878c656c-1bab-4b97-8267-910f8890946a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.611798 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7hxm\" (UniqueName: \"kubernetes.io/projected/878c656c-1bab-4b97-8267-910f8890946a-kube-api-access-g7hxm\") pod \"cert-manager-webhook-5655c58dd6-hgq5g\" (UID: \"878c656c-1bab-4b97-8267-910f8890946a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.611804 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knpmp\" (UniqueName: \"kubernetes.io/projected/4a0738a0-347d-48d6-a47e-52e13d52664d-kube-api-access-knpmp\") pod \"cert-manager-5b446d88c5-7mwtc\" (UID: \"4a0738a0-347d-48d6-a47e-52e13d52664d\") " pod="cert-manager/cert-manager-5b446d88c5-7mwtc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.612731 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb2vj\" (UniqueName: \"kubernetes.io/projected/1631e6bf-cf27-4c97-ab40-8b2170648070-kube-api-access-hb2vj\") pod \"cert-manager-cainjector-7f985d654d-dx5ll\" (UID: \"1631e6bf-cf27-4c97-ab40-8b2170648070\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.705688 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.716335 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-7mwtc" Oct 09 09:13:48 crc kubenswrapper[4710]: I1009 09:13:48.720366 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.076140 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-dx5ll"] Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.085946 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.106171 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-hgq5g"] Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.108655 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-7mwtc"] Oct 09 09:13:49 crc kubenswrapper[4710]: W1009 09:13:49.111618 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod878c656c_1bab_4b97_8267_910f8890946a.slice/crio-88cab99931ae4bd3ed0b8135cc0dc0f56699f7eb4176bc3a6b2f6eabfc74f431 WatchSource:0}: Error finding container 88cab99931ae4bd3ed0b8135cc0dc0f56699f7eb4176bc3a6b2f6eabfc74f431: Status 404 returned error can't find the container with id 88cab99931ae4bd3ed0b8135cc0dc0f56699f7eb4176bc3a6b2f6eabfc74f431 Oct 09 09:13:49 crc kubenswrapper[4710]: W1009 09:13:49.114169 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a0738a0_347d_48d6_a47e_52e13d52664d.slice/crio-8630ebfb8180094f02b99d46278f8015e243fd206024885b15b8f416d15a39c2 WatchSource:0}: Error finding container 8630ebfb8180094f02b99d46278f8015e243fd206024885b15b8f416d15a39c2: Status 404 returned error can't find the container with id 8630ebfb8180094f02b99d46278f8015e243fd206024885b15b8f416d15a39c2 Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.333529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" event={"ID":"878c656c-1bab-4b97-8267-910f8890946a","Type":"ContainerStarted","Data":"88cab99931ae4bd3ed0b8135cc0dc0f56699f7eb4176bc3a6b2f6eabfc74f431"} Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.334320 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" event={"ID":"1631e6bf-cf27-4c97-ab40-8b2170648070","Type":"ContainerStarted","Data":"8f1b9c263bb4fea5342f590c3da370600c32799d59ca3191fa0d97d7031a3e3a"} Oct 09 09:13:49 crc kubenswrapper[4710]: I1009 09:13:49.335482 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7mwtc" event={"ID":"4a0738a0-347d-48d6-a47e-52e13d52664d","Type":"ContainerStarted","Data":"8630ebfb8180094f02b99d46278f8015e243fd206024885b15b8f416d15a39c2"} Oct 09 09:13:52 crc kubenswrapper[4710]: I1009 09:13:52.352822 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-7mwtc" event={"ID":"4a0738a0-347d-48d6-a47e-52e13d52664d","Type":"ContainerStarted","Data":"27326b53e326bf33e906ac65667d92e249ca6436be5cddf28fe4c66e4185b32f"} Oct 09 09:13:52 crc kubenswrapper[4710]: I1009 09:13:52.356756 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" event={"ID":"878c656c-1bab-4b97-8267-910f8890946a","Type":"ContainerStarted","Data":"ba86f5002ee337c63fbe83940042422b7aba5a1e7a7b35e1712ebbbadb4cb253"} Oct 09 09:13:52 crc kubenswrapper[4710]: I1009 09:13:52.356888 4710 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:52 crc kubenswrapper[4710]: I1009 09:13:52.366310 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-7mwtc" podStartSLOduration=1.660542728 podStartE2EDuration="4.366277752s" podCreationTimestamp="2025-10-09 09:13:48 +0000 UTC" firstStartedPulling="2025-10-09 09:13:49.118468332 +0000 UTC m=+552.608576729" lastFinishedPulling="2025-10-09 09:13:51.824203357 +0000 UTC m=+555.314311753" observedRunningTime="2025-10-09 09:13:52.364792081 +0000 UTC m=+555.854900498" watchObservedRunningTime="2025-10-09 09:13:52.366277752 +0000 UTC m=+555.856386139" Oct 09 09:13:52 crc kubenswrapper[4710]: I1009 09:13:52.383327 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" podStartSLOduration=1.6761669879999999 podStartE2EDuration="4.383305769s" podCreationTimestamp="2025-10-09 09:13:48 +0000 UTC" firstStartedPulling="2025-10-09 09:13:49.113527396 +0000 UTC m=+552.603635794" lastFinishedPulling="2025-10-09 09:13:51.820666177 +0000 UTC m=+555.310774575" observedRunningTime="2025-10-09 09:13:52.37991218 +0000 UTC m=+555.870020578" watchObservedRunningTime="2025-10-09 09:13:52.383305769 +0000 UTC m=+555.873414166" Oct 09 09:13:53 crc kubenswrapper[4710]: I1009 09:13:53.364565 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" event={"ID":"1631e6bf-cf27-4c97-ab40-8b2170648070","Type":"ContainerStarted","Data":"c2dba83cc939db02bd6da04882108431218b8c5c816c91d84b8c84bc8c83ccb9"} Oct 09 09:13:53 crc kubenswrapper[4710]: I1009 09:13:53.379138 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-dx5ll" podStartSLOduration=1.7896820230000001 podStartE2EDuration="5.379117508s" podCreationTimestamp="2025-10-09 09:13:48 +0000 UTC" firstStartedPulling="2025-10-09 09:13:49.085721657 +0000 UTC m=+552.575830054" lastFinishedPulling="2025-10-09 09:13:52.675157142 +0000 UTC m=+556.165265539" observedRunningTime="2025-10-09 09:13:53.376422326 +0000 UTC m=+556.866530723" watchObservedRunningTime="2025-10-09 09:13:53.379117508 +0000 UTC m=+556.869225904" Oct 09 09:13:58 crc kubenswrapper[4710]: I1009 09:13:58.724848 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-hgq5g" Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.874825 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxql9"] Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875203 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-controller" containerID="cri-o://d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875245 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875260 4710 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="northd" containerID="cri-o://5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875324 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-acl-logging" containerID="cri-o://fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875365 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-node" containerID="cri-o://f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875386 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="sbdb" containerID="cri-o://6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.875229 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="nbdb" containerID="cri-o://0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" gracePeriod=30 Oct 09 09:13:59 crc kubenswrapper[4710]: I1009 09:13:59.911411 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" containerID="cri-o://c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" gracePeriod=30 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.131640 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/3.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.133860 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovn-acl-logging/0.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.134337 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovn-controller/0.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.134752 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174384 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-9hg86"] Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174667 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kubecfg-setup" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174681 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kubecfg-setup" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174693 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174698 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174703 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174708 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174715 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-acl-logging" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174720 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-acl-logging" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174729 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174733 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174743 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="sbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174748 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="sbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174759 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174764 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174770 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174774 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174782 4710 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174787 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174796 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="nbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174801 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="nbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174808 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-node" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174813 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-node" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.174821 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="northd" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174826 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="northd" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174931 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-acl-logging" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174939 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174947 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174969 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="northd" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174976 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174982 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovn-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174988 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-node" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.174994 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="kube-rbac-proxy-ovn-metrics" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.175001 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.175008 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="nbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.175015 4710 
memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="sbdb" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.175020 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.175112 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.175120 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerName="ovnkube-controller" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.182777 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.237658 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.237880 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.237949 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238095 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238160 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238217 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238012 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238036 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238250 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238270 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238469 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238549 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238649 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238579 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238603 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238796 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238823 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash" (OuterVolumeSpecName: "host-slash") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238840 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238854 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238930 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238954 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vlxv\" (UniqueName: \"kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238970 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.238985 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239002 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239026 4710 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239040 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239053 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239079 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn\") pod \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\" (UID: \"0aae2f40-061f-4e34-abaa-11bafcd40ef6\") " Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239205 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239240 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239253 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket" (OuterVolumeSpecName: "log-socket") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239266 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239279 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). 
InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239290 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239215 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-log-socket\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239649 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-systemd-units\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239716 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-ovn\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239565 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log" (OuterVolumeSpecName: "node-log") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239487 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239647 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239871 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-node-log\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239932 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-systemd\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.239994 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-slash\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240091 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240121 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wvws\" (UniqueName: \"kubernetes.io/projected/d2bca825-4b95-471c-b1f0-40008ad66a8d-kube-api-access-4wvws\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240148 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-script-lib\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240167 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-etc-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240184 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-netns\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240196 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-var-lib-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240241 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240265 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-env-overrides\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240282 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240298 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-kubelet\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240311 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-bin\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240339 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-config\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240356 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovn-node-metrics-cert\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240372 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-netd\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 
09:14:00.240401 4710 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-node-log\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240411 4710 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240418 4710 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-log-socket\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240444 4710 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240452 4710 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240460 4710 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240467 4710 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240474 4710 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240482 4710 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240489 4710 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240496 4710 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240505 4710 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240513 4710 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240521 4710 
reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240527 4710 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-slash\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240544 4710 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.240551 4710 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.243234 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv" (OuterVolumeSpecName: "kube-api-access-5vlxv") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "kube-api-access-5vlxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.243450 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.250660 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0aae2f40-061f-4e34-abaa-11bafcd40ef6" (UID: "0aae2f40-061f-4e34-abaa-11bafcd40ef6"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341847 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-systemd-units\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341890 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-ovn\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341914 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-systemd\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341930 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-node-log\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341946 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-slash\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341981 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-slash\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341985 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-ovn\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.341996 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-systemd\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342021 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-node-log\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342021 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" 
(UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-systemd-units\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342046 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342091 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-run-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342138 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wvws\" (UniqueName: \"kubernetes.io/projected/d2bca825-4b95-471c-b1f0-40008ad66a8d-kube-api-access-4wvws\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342162 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-script-lib\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342191 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-etc-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342218 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-var-lib-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342233 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-netns\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342269 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342294 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-env-overrides\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342316 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342344 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-kubelet\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342343 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-var-lib-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342362 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-bin\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342463 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-config\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342488 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovn-node-metrics-cert\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342510 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-netd\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342548 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-log-socket\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342605 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vlxv\" (UniqueName: \"kubernetes.io/projected/0aae2f40-061f-4e34-abaa-11bafcd40ef6-kube-api-access-5vlxv\") on node 
\"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342617 4710 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0aae2f40-061f-4e34-abaa-11bafcd40ef6-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342628 4710 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0aae2f40-061f-4e34-abaa-11bafcd40ef6-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342657 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-log-socket\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342684 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342704 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-kubelet\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342715 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-etc-openvswitch\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342731 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-run-netns\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342724 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-bin\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342754 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.342781 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d2bca825-4b95-471c-b1f0-40008ad66a8d-host-cni-netd\") pod \"ovnkube-node-9hg86\" (UID: 
\"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.343059 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-script-lib\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.343069 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-env-overrides\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.343317 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovnkube-config\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.347266 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d2bca825-4b95-471c-b1f0-40008ad66a8d-ovn-node-metrics-cert\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.354184 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wvws\" (UniqueName: \"kubernetes.io/projected/d2bca825-4b95-471c-b1f0-40008ad66a8d-kube-api-access-4wvws\") pod \"ovnkube-node-9hg86\" (UID: \"d2bca825-4b95-471c-b1f0-40008ad66a8d\") " pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.409500 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovnkube-controller/3.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.412002 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovn-acl-logging/0.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.412478 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-mxql9_0aae2f40-061f-4e34-abaa-11bafcd40ef6/ovn-controller/0.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.412965 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413012 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413021 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413031 4710 generic.go:334] 
"Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413039 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413047 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" exitCode=0 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413053 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" exitCode=143 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413060 4710 generic.go:334] "Generic (PLEG): container finished" podID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" exitCode=143 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413074 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413072 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413121 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413134 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413143 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413152 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413196 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413210 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 
09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413217 4710 scope.go:117] "RemoveContainer" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413222 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413307 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413329 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413335 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413341 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413400 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413406 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413411 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413449 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413474 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413481 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413486 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413491 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413495 4710 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413500 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413504 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413755 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413769 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413775 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413788 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413851 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413859 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413865 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413871 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413876 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413882 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413887 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413891 4710 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413896 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413919 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413927 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-mxql9" event={"ID":"0aae2f40-061f-4e34-abaa-11bafcd40ef6","Type":"ContainerDied","Data":"87c78efd541350d9d2e5c655a9516e54339a67890de5ef9b93cac19bd3dd8eec"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413936 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413942 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413946 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413951 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413956 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413960 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413965 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413970 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.413975 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.414037 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.415470 4710 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/2.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.415984 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/1.log" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.416017 4710 generic.go:334] "Generic (PLEG): container finished" podID="421bdfde-a7ad-4e4c-aa0d-624104899b94" containerID="40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c" exitCode=2 Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.416053 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerDied","Data":"40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.416097 4710 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9"} Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.416579 4710 scope.go:117] "RemoveContainer" containerID="40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.416754 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5c9mg_openshift-multus(421bdfde-a7ad-4e4c-aa0d-624104899b94)\"" pod="openshift-multus/multus-5c9mg" podUID="421bdfde-a7ad-4e4c-aa0d-624104899b94" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.429511 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.453358 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxql9"] Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.453472 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-mxql9"] Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.466794 4710 scope.go:117] "RemoveContainer" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.481241 4710 scope.go:117] "RemoveContainer" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.493730 4710 scope.go:117] "RemoveContainer" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.497481 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.515577 4710 scope.go:117] "RemoveContainer" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.539616 4710 scope.go:117] "RemoveContainer" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.575114 4710 scope.go:117] "RemoveContainer" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.595556 4710 scope.go:117] "RemoveContainer" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.607232 4710 scope.go:117] "RemoveContainer" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.617906 4710 scope.go:117] "RemoveContainer" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.618309 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.618343 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} err="failed to get container status \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.618367 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.618652 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": container with ID starting with 494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19 not found: ID does not exist" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.618672 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} err="failed to get container status \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": rpc error: code = NotFound desc = could not find container \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": container with ID starting with 494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.618685 4710 scope.go:117] "RemoveContainer" 
containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.619002 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": container with ID starting with 6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d not found: ID does not exist" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619020 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} err="failed to get container status \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": rpc error: code = NotFound desc = could not find container \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": container with ID starting with 6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619036 4710 scope.go:117] "RemoveContainer" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.619300 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": container with ID starting with 0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad not found: ID does not exist" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619320 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} err="failed to get container status \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": rpc error: code = NotFound desc = could not find container \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": container with ID starting with 0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619335 4710 scope.go:117] "RemoveContainer" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.619851 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": container with ID starting with 5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2 not found: ID does not exist" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619914 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} err="failed to get container status \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": rpc error: code = NotFound desc = could not find container \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": container with ID starting with 
5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.619968 4710 scope.go:117] "RemoveContainer" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.620575 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": container with ID starting with d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2 not found: ID does not exist" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.620608 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} err="failed to get container status \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": rpc error: code = NotFound desc = could not find container \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": container with ID starting with d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.620645 4710 scope.go:117] "RemoveContainer" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.621001 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": container with ID starting with f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4 not found: ID does not exist" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621049 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} err="failed to get container status \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": rpc error: code = NotFound desc = could not find container \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": container with ID starting with f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621063 4710 scope.go:117] "RemoveContainer" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.621338 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": container with ID starting with fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842 not found: ID does not exist" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621377 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} err="failed to get container status \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": rpc 
error: code = NotFound desc = could not find container \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": container with ID starting with fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621390 4710 scope.go:117] "RemoveContainer" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.621726 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": container with ID starting with d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1 not found: ID does not exist" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621748 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} err="failed to get container status \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": rpc error: code = NotFound desc = could not find container \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": container with ID starting with d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.621759 4710 scope.go:117] "RemoveContainer" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: E1009 09:14:00.622181 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": container with ID starting with b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e not found: ID does not exist" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.622202 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} err="failed to get container status \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": rpc error: code = NotFound desc = could not find container \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": container with ID starting with b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.622216 4710 scope.go:117] "RemoveContainer" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.623037 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} err="failed to get container status \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 
09:14:00.623081 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.623398 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} err="failed to get container status \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": rpc error: code = NotFound desc = could not find container \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": container with ID starting with 494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.623425 4710 scope.go:117] "RemoveContainer" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.623816 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} err="failed to get container status \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": rpc error: code = NotFound desc = could not find container \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": container with ID starting with 6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.623834 4710 scope.go:117] "RemoveContainer" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.624142 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} err="failed to get container status \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": rpc error: code = NotFound desc = could not find container \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": container with ID starting with 0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.624161 4710 scope.go:117] "RemoveContainer" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.624495 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} err="failed to get container status \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": rpc error: code = NotFound desc = could not find container \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": container with ID starting with 5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.624535 4710 scope.go:117] "RemoveContainer" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.625407 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} err="failed to get container status 
\"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": rpc error: code = NotFound desc = could not find container \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": container with ID starting with d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.625440 4710 scope.go:117] "RemoveContainer" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.625770 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} err="failed to get container status \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": rpc error: code = NotFound desc = could not find container \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": container with ID starting with f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.625824 4710 scope.go:117] "RemoveContainer" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.626461 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} err="failed to get container status \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": rpc error: code = NotFound desc = could not find container \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": container with ID starting with fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.626502 4710 scope.go:117] "RemoveContainer" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.626838 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} err="failed to get container status \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": rpc error: code = NotFound desc = could not find container \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": container with ID starting with d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.626869 4710 scope.go:117] "RemoveContainer" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627059 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} err="failed to get container status \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": rpc error: code = NotFound desc = could not find container \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": container with ID starting with b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627079 4710 scope.go:117] "RemoveContainer" 
containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627265 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} err="failed to get container status \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627283 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627478 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} err="failed to get container status \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": rpc error: code = NotFound desc = could not find container \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": container with ID starting with 494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627497 4710 scope.go:117] "RemoveContainer" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627700 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} err="failed to get container status \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": rpc error: code = NotFound desc = could not find container \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": container with ID starting with 6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627724 4710 scope.go:117] "RemoveContainer" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627876 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} err="failed to get container status \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": rpc error: code = NotFound desc = could not find container \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": container with ID starting with 0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.627893 4710 scope.go:117] "RemoveContainer" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628033 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} err="failed to get container status \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": rpc error: code = NotFound desc = could not find 
container \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": container with ID starting with 5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628050 4710 scope.go:117] "RemoveContainer" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628254 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} err="failed to get container status \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": rpc error: code = NotFound desc = could not find container \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": container with ID starting with d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628267 4710 scope.go:117] "RemoveContainer" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628418 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} err="failed to get container status \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": rpc error: code = NotFound desc = could not find container \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": container with ID starting with f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628512 4710 scope.go:117] "RemoveContainer" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628710 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} err="failed to get container status \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": rpc error: code = NotFound desc = could not find container \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": container with ID starting with fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628728 4710 scope.go:117] "RemoveContainer" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628868 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} err="failed to get container status \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": rpc error: code = NotFound desc = could not find container \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": container with ID starting with d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.628890 4710 scope.go:117] "RemoveContainer" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.629092 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} err="failed to get container status \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": rpc error: code = NotFound desc = could not find container \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": container with ID starting with b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.629116 4710 scope.go:117] "RemoveContainer" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.629383 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} err="failed to get container status \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.629407 4710 scope.go:117] "RemoveContainer" containerID="494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.630900 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19"} err="failed to get container status \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": rpc error: code = NotFound desc = could not find container \"494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19\": container with ID starting with 494948fc0519e3bb9a82ebd6a60c5a07a91c5d6a51b807559803d99ac44fdf19 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.630943 4710 scope.go:117] "RemoveContainer" containerID="6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631192 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d"} err="failed to get container status \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": rpc error: code = NotFound desc = could not find container \"6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d\": container with ID starting with 6ad796152fb7cadf9219ebc97a7ae7c69253b5bb84080b472b194408f1d3765d not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631215 4710 scope.go:117] "RemoveContainer" containerID="0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631554 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad"} err="failed to get container status \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": rpc error: code = NotFound desc = could not find container \"0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad\": container with ID starting with 
0dae8caf431a550eeaffc059b28bdb048e4a283343813f73afff36f732e9b2ad not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631573 4710 scope.go:117] "RemoveContainer" containerID="5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631833 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2"} err="failed to get container status \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": rpc error: code = NotFound desc = could not find container \"5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2\": container with ID starting with 5f0d195954c95d0e921dfffbd613209e472789055ef183d20c3b8c07601e8be2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.631855 4710 scope.go:117] "RemoveContainer" containerID="d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632166 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2"} err="failed to get container status \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": rpc error: code = NotFound desc = could not find container \"d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2\": container with ID starting with d897745337affd0710c102ce130d9067537f04a4bd08ff87e273cc018d6294c2 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632190 4710 scope.go:117] "RemoveContainer" containerID="f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632506 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4"} err="failed to get container status \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": rpc error: code = NotFound desc = could not find container \"f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4\": container with ID starting with f96f9f31cd4f0e9543da2e6d1fbdd5355307d93796604da004f177a0b944e0f4 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632531 4710 scope.go:117] "RemoveContainer" containerID="fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632817 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842"} err="failed to get container status \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": rpc error: code = NotFound desc = could not find container \"fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842\": container with ID starting with fa1c1153cba4ced949f340a444258d38e62f2ab12ce151e1bc48acdb2d191842 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.632839 4710 scope.go:117] "RemoveContainer" containerID="d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.633282 4710 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1"} err="failed to get container status \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": rpc error: code = NotFound desc = could not find container \"d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1\": container with ID starting with d3a473bb71e14ac18c06640c62e79e02baf53a558195fce0b7c76dc72c1f8dc1 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.633300 4710 scope.go:117] "RemoveContainer" containerID="b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.633785 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e"} err="failed to get container status \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": rpc error: code = NotFound desc = could not find container \"b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e\": container with ID starting with b47ee25d1ac5e6f5c53528565b7a7e2af091c4aa322e9b61956322b6265d172e not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.633822 4710 scope.go:117] "RemoveContainer" containerID="c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.634162 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99"} err="failed to get container status \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": rpc error: code = NotFound desc = could not find container \"c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99\": container with ID starting with c27fd473d50f0485e5b3d77997bbe2df0fec77f02222cd263127da7f950edc99 not found: ID does not exist" Oct 09 09:14:00 crc kubenswrapper[4710]: I1009 09:14:00.821412 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aae2f40-061f-4e34-abaa-11bafcd40ef6" path="/var/lib/kubelet/pods/0aae2f40-061f-4e34-abaa-11bafcd40ef6/volumes" Oct 09 09:14:01 crc kubenswrapper[4710]: I1009 09:14:01.424276 4710 generic.go:334] "Generic (PLEG): container finished" podID="d2bca825-4b95-471c-b1f0-40008ad66a8d" containerID="9f1347d0dc750069bf41522818ab7897a4af4b38fdfff0d08f5a337777f843ab" exitCode=0 Oct 09 09:14:01 crc kubenswrapper[4710]: I1009 09:14:01.424323 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerDied","Data":"9f1347d0dc750069bf41522818ab7897a4af4b38fdfff0d08f5a337777f843ab"} Oct 09 09:14:01 crc kubenswrapper[4710]: I1009 09:14:01.424359 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"6a30e1465230d72889bb13eba43832a32c020b0cc750a4f589d6f1e5d47b63b7"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444155 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"693ca63cc27e7cbd6ed27e3a727655be195d51baf96c0af6239d995f57e9311e"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444537 4710 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"e6be50fc656bb765e0ff5f1d2981f1c0d4429f818a6f564b810b3ff27ccc6dbc"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444553 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"899070331a849937d54379ef0d5a588db01cb3e002c2c4970cb1b7fc5b2042f5"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444563 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"a66da84344f30fe3a7b5ea7f18686e79fa7d3ea25a2066c65a198dc0c668287e"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444573 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"04bf03ce114c753e1a89d7146901f1ba4c9f0a1f3d6290f2b1c45061922932a9"} Oct 09 09:14:02 crc kubenswrapper[4710]: I1009 09:14:02.444581 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"8b1e101836b775f048dfbe6f1fc684f858437295dcbe2926dce99a16251341c1"} Oct 09 09:14:04 crc kubenswrapper[4710]: I1009 09:14:04.460619 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"555c417c3da4d67d6dab126716ed2deb97b725bea77ef39914fe89d117f2681a"} Oct 09 09:14:05 crc kubenswrapper[4710]: I1009 09:14:05.545851 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:14:05 crc kubenswrapper[4710]: I1009 09:14:05.545963 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:14:05 crc kubenswrapper[4710]: I1009 09:14:05.546028 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:14:05 crc kubenswrapper[4710]: I1009 09:14:05.546953 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:14:05 crc kubenswrapper[4710]: I1009 09:14:05.547039 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" 
containerID="cri-o://67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36" gracePeriod=600 Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.473267 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36" exitCode=0 Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.473338 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36"} Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.473927 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d"} Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.473953 4710 scope.go:117] "RemoveContainer" containerID="c7e7af49e6c1ca0ce0353e8934a08e6c28c703255882b4f40e6190d8a146137c" Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.481910 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" event={"ID":"d2bca825-4b95-471c-b1f0-40008ad66a8d","Type":"ContainerStarted","Data":"50af9854d31979b7efb2e13ff588c6954a71f2e5617651c8c69fc3a979af487d"} Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.482135 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.482187 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.509910 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:06 crc kubenswrapper[4710]: I1009 09:14:06.517518 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" podStartSLOduration=6.517508215 podStartE2EDuration="6.517508215s" podCreationTimestamp="2025-10-09 09:14:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:14:06.515478006 +0000 UTC m=+570.005586403" watchObservedRunningTime="2025-10-09 09:14:06.517508215 +0000 UTC m=+570.007616612" Oct 09 09:14:07 crc kubenswrapper[4710]: I1009 09:14:07.490925 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:07 crc kubenswrapper[4710]: I1009 09:14:07.517866 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:15 crc kubenswrapper[4710]: I1009 09:14:15.815125 4710 scope.go:117] "RemoveContainer" containerID="40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c" Oct 09 09:14:15 crc kubenswrapper[4710]: E1009 09:14:15.816498 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-5c9mg_openshift-multus(421bdfde-a7ad-4e4c-aa0d-624104899b94)\"" 
pod="openshift-multus/multus-5c9mg" podUID="421bdfde-a7ad-4e4c-aa0d-624104899b94" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.071614 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp"] Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.073961 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.075684 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.082349 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp"] Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.173652 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.173741 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.173762 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkmlh\" (UniqueName: \"kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.275190 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.275259 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.275282 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkmlh\" (UniqueName: \"kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh\") pod 
\"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.275909 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.276014 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.294770 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkmlh\" (UniqueName: \"kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.391868 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.418225 4710 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(576b481871d1c6d261b26a0e0c5ae177ffcbcc196e4834bf88c293ee66cb4a47): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.418304 4710 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(576b481871d1c6d261b26a0e0c5ae177ffcbcc196e4834bf88c293ee66cb4a47): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.418346 4710 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(576b481871d1c6d261b26a0e0c5ae177ffcbcc196e4834bf88c293ee66cb4a47): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.418415 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace(98d5ef90-e17a-4c38-b665-ad2311e7b3b1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace(98d5ef90-e17a-4c38-b665-ad2311e7b3b1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(576b481871d1c6d261b26a0e0c5ae177ffcbcc196e4834bf88c293ee66cb4a47): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.599680 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.600039 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.623016 4710 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(a409ac71c37936d3a2d6e4751d1639da22c02f898f345821454317766c473e3f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.623093 4710 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(a409ac71c37936d3a2d6e4751d1639da22c02f898f345821454317766c473e3f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.623118 4710 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(a409ac71c37936d3a2d6e4751d1639da22c02f898f345821454317766c473e3f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:29 crc kubenswrapper[4710]: E1009 09:14:29.623172 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace(98d5ef90-e17a-4c38-b665-ad2311e7b3b1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace(98d5ef90-e17a-4c38-b665-ad2311e7b3b1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_openshift-marketplace_98d5ef90-e17a-4c38-b665-ad2311e7b3b1_0(a409ac71c37936d3a2d6e4751d1639da22c02f898f345821454317766c473e3f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" Oct 09 09:14:29 crc kubenswrapper[4710]: I1009 09:14:29.815265 4710 scope.go:117] "RemoveContainer" containerID="40509c86ec1f18b2ce622b6a0ac051e48bf00502c6ebbe86a51d732646149c0c" Oct 09 09:14:30 crc kubenswrapper[4710]: I1009 09:14:30.517054 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-9hg86" Oct 09 09:14:30 crc kubenswrapper[4710]: I1009 09:14:30.608112 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/2.log" Oct 09 09:14:30 crc kubenswrapper[4710]: I1009 09:14:30.608500 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/1.log" Oct 09 09:14:30 crc kubenswrapper[4710]: I1009 09:14:30.608555 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-5c9mg" event={"ID":"421bdfde-a7ad-4e4c-aa0d-624104899b94","Type":"ContainerStarted","Data":"4e3d309a2cf754c4c42ccb535c292dd0e2108f20ee3adc4abd9762a253e49610"} Oct 09 09:14:36 crc kubenswrapper[4710]: I1009 09:14:36.949158 4710 scope.go:117] "RemoveContainer" containerID="5954ec1339b6e50d0c48c43be96a3a54b58d4fe715bdc47b91aa80b107aad7b9" Oct 09 09:14:37 crc kubenswrapper[4710]: I1009 09:14:37.642054 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-5c9mg_421bdfde-a7ad-4e4c-aa0d-624104899b94/kube-multus/2.log" Oct 09 09:14:41 crc kubenswrapper[4710]: I1009 09:14:41.814894 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:41 crc kubenswrapper[4710]: I1009 09:14:41.815570 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:41 crc kubenswrapper[4710]: I1009 09:14:41.979740 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp"] Oct 09 09:14:42 crc kubenswrapper[4710]: I1009 09:14:42.665084 4710 generic.go:334] "Generic (PLEG): container finished" podID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerID="7efa666f9c4a567686ed5ca8edd3ca66a5f3b48d25be9eb08ad0ef922e3d115c" exitCode=0 Oct 09 09:14:42 crc kubenswrapper[4710]: I1009 09:14:42.665147 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" event={"ID":"98d5ef90-e17a-4c38-b665-ad2311e7b3b1","Type":"ContainerDied","Data":"7efa666f9c4a567686ed5ca8edd3ca66a5f3b48d25be9eb08ad0ef922e3d115c"} Oct 09 09:14:42 crc kubenswrapper[4710]: I1009 09:14:42.665481 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" event={"ID":"98d5ef90-e17a-4c38-b665-ad2311e7b3b1","Type":"ContainerStarted","Data":"1c62991fe37b959b5e507c74394f58abfa89ce682acae2389e9ae150a7b258cf"} Oct 09 09:14:44 crc kubenswrapper[4710]: I1009 09:14:44.678968 4710 generic.go:334] "Generic (PLEG): container finished" podID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerID="dd46db582bc8bba09ef97fba7d699947a832e4750e89c33bf80608b2623a3645" exitCode=0 Oct 09 09:14:44 crc kubenswrapper[4710]: I1009 09:14:44.679053 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" event={"ID":"98d5ef90-e17a-4c38-b665-ad2311e7b3b1","Type":"ContainerDied","Data":"dd46db582bc8bba09ef97fba7d699947a832e4750e89c33bf80608b2623a3645"} Oct 09 09:14:45 crc kubenswrapper[4710]: I1009 09:14:45.688087 4710 generic.go:334] "Generic (PLEG): container finished" podID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerID="81bfa5cb42470d529dcfe47ba5989374b4079184cb1d3b4afb319fa2c9966dd8" exitCode=0 Oct 09 09:14:45 crc kubenswrapper[4710]: I1009 09:14:45.688131 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" event={"ID":"98d5ef90-e17a-4c38-b665-ad2311e7b3b1","Type":"ContainerDied","Data":"81bfa5cb42470d529dcfe47ba5989374b4079184cb1d3b4afb319fa2c9966dd8"} Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.860954 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.953710 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util\") pod \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.953831 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle\") pod \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.953870 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkmlh\" (UniqueName: \"kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh\") pod \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\" (UID: \"98d5ef90-e17a-4c38-b665-ad2311e7b3b1\") " Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.955396 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle" (OuterVolumeSpecName: "bundle") pod "98d5ef90-e17a-4c38-b665-ad2311e7b3b1" (UID: "98d5ef90-e17a-4c38-b665-ad2311e7b3b1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.960191 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh" (OuterVolumeSpecName: "kube-api-access-zkmlh") pod "98d5ef90-e17a-4c38-b665-ad2311e7b3b1" (UID: "98d5ef90-e17a-4c38-b665-ad2311e7b3b1"). InnerVolumeSpecName "kube-api-access-zkmlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:14:46 crc kubenswrapper[4710]: I1009 09:14:46.964477 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util" (OuterVolumeSpecName: "util") pod "98d5ef90-e17a-4c38-b665-ad2311e7b3b1" (UID: "98d5ef90-e17a-4c38-b665-ad2311e7b3b1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.054734 4710 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.054760 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkmlh\" (UniqueName: \"kubernetes.io/projected/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-kube-api-access-zkmlh\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.054770 4710 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/98d5ef90-e17a-4c38-b665-ad2311e7b3b1-util\") on node \"crc\" DevicePath \"\"" Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.700869 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" event={"ID":"98d5ef90-e17a-4c38-b665-ad2311e7b3b1","Type":"ContainerDied","Data":"1c62991fe37b959b5e507c74394f58abfa89ce682acae2389e9ae150a7b258cf"} Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.701156 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c62991fe37b959b5e507c74394f58abfa89ce682acae2389e9ae150a7b258cf" Oct 09 09:14:47 crc kubenswrapper[4710]: I1009 09:14:47.700925 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.607676 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c98j9"] Oct 09 09:14:50 crc kubenswrapper[4710]: E1009 09:14:50.607841 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="extract" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.607853 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="extract" Oct 09 09:14:50 crc kubenswrapper[4710]: E1009 09:14:50.607861 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="pull" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.607866 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="pull" Oct 09 09:14:50 crc kubenswrapper[4710]: E1009 09:14:50.607881 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="util" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.607886 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="util" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.607968 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d5ef90-e17a-4c38-b665-ad2311e7b3b1" containerName="extract" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.608276 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.610494 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.611372 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-d7dnc" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.611549 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.614710 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c98j9"] Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.702826 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd8f7\" (UniqueName: \"kubernetes.io/projected/35322d9f-0b0d-40a7-b13c-7763f5027a59-kube-api-access-jd8f7\") pod \"nmstate-operator-858ddd8f98-c98j9\" (UID: \"35322d9f-0b0d-40a7-b13c-7763f5027a59\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.804021 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd8f7\" (UniqueName: \"kubernetes.io/projected/35322d9f-0b0d-40a7-b13c-7763f5027a59-kube-api-access-jd8f7\") pod \"nmstate-operator-858ddd8f98-c98j9\" (UID: \"35322d9f-0b0d-40a7-b13c-7763f5027a59\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.820254 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd8f7\" (UniqueName: \"kubernetes.io/projected/35322d9f-0b0d-40a7-b13c-7763f5027a59-kube-api-access-jd8f7\") pod \"nmstate-operator-858ddd8f98-c98j9\" (UID: \"35322d9f-0b0d-40a7-b13c-7763f5027a59\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" Oct 09 09:14:50 crc kubenswrapper[4710]: I1009 09:14:50.919797 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" Oct 09 09:14:51 crc kubenswrapper[4710]: I1009 09:14:51.294151 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-c98j9"] Oct 09 09:14:51 crc kubenswrapper[4710]: I1009 09:14:51.720904 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" event={"ID":"35322d9f-0b0d-40a7-b13c-7763f5027a59","Type":"ContainerStarted","Data":"8a5627d12a83e85c4ae74b2e6da2fd647d9163193a68b8674318ed2fa594cd3e"} Oct 09 09:14:53 crc kubenswrapper[4710]: I1009 09:14:53.735401 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" event={"ID":"35322d9f-0b0d-40a7-b13c-7763f5027a59","Type":"ContainerStarted","Data":"62b6c872d368c9e1c174e577eb6ae0c761d1a6489566a69615e2b4769ef33118"} Oct 09 09:14:53 crc kubenswrapper[4710]: I1009 09:14:53.752944 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-c98j9" podStartSLOduration=1.616597853 podStartE2EDuration="3.75291927s" podCreationTimestamp="2025-10-09 09:14:50 +0000 UTC" firstStartedPulling="2025-10-09 09:14:51.295454697 +0000 UTC m=+614.785563095" lastFinishedPulling="2025-10-09 09:14:53.431776115 +0000 UTC m=+616.921884512" observedRunningTime="2025-10-09 09:14:53.748713431 +0000 UTC m=+617.238821828" watchObservedRunningTime="2025-10-09 09:14:53.75291927 +0000 UTC m=+617.243027667" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.716373 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.718377 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.722489 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-z2d8r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.726179 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.727365 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.732663 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.740403 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.744137 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.766043 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-zcxkk"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.766758 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818473 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-ovs-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818524 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mrms\" (UniqueName: \"kubernetes.io/projected/89eda8ab-752d-4dc4-af4a-009431208f96-kube-api-access-8mrms\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: \"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818547 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89eda8ab-752d-4dc4-af4a-009431208f96-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: \"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818584 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gttd\" (UniqueName: \"kubernetes.io/projected/0899a565-e72a-498c-9071-7b05ccb027bd-kube-api-access-5gttd\") pod \"nmstate-metrics-fdff9cb8d-bcx7j\" (UID: \"0899a565-e72a-498c-9071-7b05ccb027bd\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818610 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-dbus-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818632 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/f174bd18-14b5-495f-8d34-795eca72dc06-kube-api-access-7z5wk\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.818650 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-nmstate-lock\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.871707 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.873112 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.876775 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-b59mz" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.877133 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.891949 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919289 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gttd\" (UniqueName: \"kubernetes.io/projected/0899a565-e72a-498c-9071-7b05ccb027bd-kube-api-access-5gttd\") pod \"nmstate-metrics-fdff9cb8d-bcx7j\" (UID: \"0899a565-e72a-498c-9071-7b05ccb027bd\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919456 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-dbus-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919546 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/f174bd18-14b5-495f-8d34-795eca72dc06-kube-api-access-7z5wk\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919624 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/dd0f6b60-657a-4d34-a175-74f88f730669-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919708 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0f6b60-657a-4d34-a175-74f88f730669-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919778 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-nmstate-lock\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919861 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-ovs-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.919901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-dbus-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.920047 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mrms\" (UniqueName: \"kubernetes.io/projected/89eda8ab-752d-4dc4-af4a-009431208f96-kube-api-access-8mrms\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: \"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.920131 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89eda8ab-752d-4dc4-af4a-009431208f96-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: \"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.920206 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kc767\" (UniqueName: \"kubernetes.io/projected/dd0f6b60-657a-4d34-a175-74f88f730669-kube-api-access-kc767\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.920223 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-ovs-socket\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.920244 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f174bd18-14b5-495f-8d34-795eca72dc06-nmstate-lock\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.923765 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb"] Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.926142 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/89eda8ab-752d-4dc4-af4a-009431208f96-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: \"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.943100 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z5wk\" (UniqueName: \"kubernetes.io/projected/f174bd18-14b5-495f-8d34-795eca72dc06-kube-api-access-7z5wk\") pod \"nmstate-handler-zcxkk\" (UID: \"f174bd18-14b5-495f-8d34-795eca72dc06\") " pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.943835 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mrms\" (UniqueName: \"kubernetes.io/projected/89eda8ab-752d-4dc4-af4a-009431208f96-kube-api-access-8mrms\") pod \"nmstate-webhook-6cdbc54649-sfv4r\" (UID: 
\"89eda8ab-752d-4dc4-af4a-009431208f96\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:14:59 crc kubenswrapper[4710]: I1009 09:14:59.947697 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gttd\" (UniqueName: \"kubernetes.io/projected/0899a565-e72a-498c-9071-7b05ccb027bd-kube-api-access-5gttd\") pod \"nmstate-metrics-fdff9cb8d-bcx7j\" (UID: \"0899a565-e72a-498c-9071-7b05ccb027bd\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.021168 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/dd0f6b60-657a-4d34-a175-74f88f730669-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.021329 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0f6b60-657a-4d34-a175-74f88f730669-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.021641 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kc767\" (UniqueName: \"kubernetes.io/projected/dd0f6b60-657a-4d34-a175-74f88f730669-kube-api-access-kc767\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.022374 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/dd0f6b60-657a-4d34-a175-74f88f730669-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.024886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd0f6b60-657a-4d34-a175-74f88f730669-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.033830 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.051402 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kc767\" (UniqueName: \"kubernetes.io/projected/dd0f6b60-657a-4d34-a175-74f88f730669-kube-api-access-kc767\") pod \"nmstate-console-plugin-6b874cbd85-ptzlb\" (UID: \"dd0f6b60-657a-4d34-a175-74f88f730669\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.052058 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.092050 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.096393 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6cf6c9dd57-mp99p"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.097165 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.111273 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6cf6c9dd57-mp99p"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.123895 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvqng\" (UniqueName: \"kubernetes.io/projected/a605f5f0-d2dc-4bfa-a274-4bdca840be26-kube-api-access-qvqng\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.123971 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.124006 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-oauth-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.124053 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-service-ca\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.124136 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-oauth-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.124198 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.124229 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-trusted-ca-bundle\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.182363 4710 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.183059 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.186074 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.186131 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.187750 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.210721 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226124 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-service-ca\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226416 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226469 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-oauth-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226591 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226766 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.226793 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-trusted-ca-bundle\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " 
pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.227030 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl268\" (UniqueName: \"kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.227272 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvqng\" (UniqueName: \"kubernetes.io/projected/a605f5f0-d2dc-4bfa-a274-4bdca840be26-kube-api-access-qvqng\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.227301 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.227541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-oauth-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.234651 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-service-ca\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.236213 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.237178 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-trusted-ca-bundle\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.237774 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a605f5f0-d2dc-4bfa-a274-4bdca840be26-oauth-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.239408 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-oauth-config\") pod \"console-6cf6c9dd57-mp99p\" (UID: 
\"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.252828 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a605f5f0-d2dc-4bfa-a274-4bdca840be26-console-serving-cert\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.258766 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvqng\" (UniqueName: \"kubernetes.io/projected/a605f5f0-d2dc-4bfa-a274-4bdca840be26-kube-api-access-qvqng\") pod \"console-6cf6c9dd57-mp99p\" (UID: \"a605f5f0-d2dc-4bfa-a274-4bdca840be26\") " pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.330355 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.330483 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl268\" (UniqueName: \"kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.330564 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.331833 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.334060 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.359079 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl268\" (UniqueName: \"kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268\") pod \"collect-profiles-29333355-zg47v\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.434610 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.509935 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.545398 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j"] Oct 09 09:15:00 crc kubenswrapper[4710]: W1009 09:15:00.552366 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0899a565_e72a_498c_9071_7b05ccb027bd.slice/crio-a918ce2a80b14a32d9e3d7a25fce240bccc237c28877d3daf1c5708fc32ccd90 WatchSource:0}: Error finding container a918ce2a80b14a32d9e3d7a25fce240bccc237c28877d3daf1c5708fc32ccd90: Status 404 returned error can't find the container with id a918ce2a80b14a32d9e3d7a25fce240bccc237c28877d3daf1c5708fc32ccd90 Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.616070 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.625158 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.657583 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6cf6c9dd57-mp99p"] Oct 09 09:15:00 crc kubenswrapper[4710]: W1009 09:15:00.664365 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda605f5f0_d2dc_4bfa_a274_4bdca840be26.slice/crio-82170d227e27ca38c6d65e8c71178a322635992d662077869a613b208a27ec89 WatchSource:0}: Error finding container 82170d227e27ca38c6d65e8c71178a322635992d662077869a613b208a27ec89: Status 404 returned error can't find the container with id 82170d227e27ca38c6d65e8c71178a322635992d662077869a613b208a27ec89 Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.719808 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v"] Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.771651 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" event={"ID":"0899a565-e72a-498c-9071-7b05ccb027bd","Type":"ContainerStarted","Data":"a918ce2a80b14a32d9e3d7a25fce240bccc237c28877d3daf1c5708fc32ccd90"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.772929 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6cf6c9dd57-mp99p" event={"ID":"a605f5f0-d2dc-4bfa-a274-4bdca840be26","Type":"ContainerStarted","Data":"82170d227e27ca38c6d65e8c71178a322635992d662077869a613b208a27ec89"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.774128 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zcxkk" event={"ID":"f174bd18-14b5-495f-8d34-795eca72dc06","Type":"ContainerStarted","Data":"9240d638e6cae18e7ba3ab156a8e863314cee2d4bc297ce7975c84a29954c277"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.775929 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" 
event={"ID":"dd0f6b60-657a-4d34-a175-74f88f730669","Type":"ContainerStarted","Data":"f949765359e4cbbfe093d8f59e62da789b1541c918fb93eadf1f7f6b10977b33"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.777147 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" event={"ID":"89eda8ab-752d-4dc4-af4a-009431208f96","Type":"ContainerStarted","Data":"62be5ce42bf30d3e82a5cff3b56a541a936f2ae2ac8444cf604b7ab0f862a6a7"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.777995 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" event={"ID":"94fa45a8-fffa-4fb1-bb72-8a21c2825e96","Type":"ContainerStarted","Data":"004dcac0e3b6c0a4d66de7fb68c4892a369c491dc2bb6078e695a94117adaf7b"} Oct 09 09:15:00 crc kubenswrapper[4710]: I1009 09:15:00.790584 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6cf6c9dd57-mp99p" podStartSLOduration=0.790565885 podStartE2EDuration="790.565885ms" podCreationTimestamp="2025-10-09 09:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:15:00.788065952 +0000 UTC m=+624.278174349" watchObservedRunningTime="2025-10-09 09:15:00.790565885 +0000 UTC m=+624.280674281" Oct 09 09:15:01 crc kubenswrapper[4710]: I1009 09:15:01.789821 4710 generic.go:334] "Generic (PLEG): container finished" podID="94fa45a8-fffa-4fb1-bb72-8a21c2825e96" containerID="f201b183d7a14b739cab32485256fd2b368745fdd119ff9070fed1eb60a3374b" exitCode=0 Oct 09 09:15:01 crc kubenswrapper[4710]: I1009 09:15:01.789944 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" event={"ID":"94fa45a8-fffa-4fb1-bb72-8a21c2825e96","Type":"ContainerDied","Data":"f201b183d7a14b739cab32485256fd2b368745fdd119ff9070fed1eb60a3374b"} Oct 09 09:15:01 crc kubenswrapper[4710]: I1009 09:15:01.793244 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6cf6c9dd57-mp99p" event={"ID":"a605f5f0-d2dc-4bfa-a274-4bdca840be26","Type":"ContainerStarted","Data":"73b168f4c009e63e77dedf9f333de57946aaed9e1d9850e8a2725e0ea25c0989"} Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.123970 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.170451 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume\") pod \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.170510 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume\") pod \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.170563 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl268\" (UniqueName: \"kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268\") pod \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\" (UID: \"94fa45a8-fffa-4fb1-bb72-8a21c2825e96\") " Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.171741 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume" (OuterVolumeSpecName: "config-volume") pod "94fa45a8-fffa-4fb1-bb72-8a21c2825e96" (UID: "94fa45a8-fffa-4fb1-bb72-8a21c2825e96"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.175792 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "94fa45a8-fffa-4fb1-bb72-8a21c2825e96" (UID: "94fa45a8-fffa-4fb1-bb72-8a21c2825e96"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.176116 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268" (OuterVolumeSpecName: "kube-api-access-dl268") pod "94fa45a8-fffa-4fb1-bb72-8a21c2825e96" (UID: "94fa45a8-fffa-4fb1-bb72-8a21c2825e96"). InnerVolumeSpecName "kube-api-access-dl268". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.273991 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl268\" (UniqueName: \"kubernetes.io/projected/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-kube-api-access-dl268\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.274034 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.274048 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94fa45a8-fffa-4fb1-bb72-8a21c2825e96-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.822324 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" event={"ID":"94fa45a8-fffa-4fb1-bb72-8a21c2825e96","Type":"ContainerDied","Data":"004dcac0e3b6c0a4d66de7fb68c4892a369c491dc2bb6078e695a94117adaf7b"} Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.822625 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="004dcac0e3b6c0a4d66de7fb68c4892a369c491dc2bb6078e695a94117adaf7b" Oct 09 09:15:03 crc kubenswrapper[4710]: I1009 09:15:03.822410 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v" Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.831060 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" event={"ID":"0899a565-e72a-498c-9071-7b05ccb027bd","Type":"ContainerStarted","Data":"92e6f1449a85cbcac37dfcf1a3a73764ad2998c8fbd98ec42aa51ed81018bd68"} Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.832687 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zcxkk" event={"ID":"f174bd18-14b5-495f-8d34-795eca72dc06","Type":"ContainerStarted","Data":"77caeea415bf36df421bc05bf4e60c35e4f5d1afa3fc64ac710616cbb50db1fc"} Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.832780 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.834917 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" event={"ID":"dd0f6b60-657a-4d34-a175-74f88f730669","Type":"ContainerStarted","Data":"af67a94a2e83de5ef1be5114d184215627f44a4395d34a81855cac338898ec63"} Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.837095 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" event={"ID":"89eda8ab-752d-4dc4-af4a-009431208f96","Type":"ContainerStarted","Data":"bbe27f62b029ca25e6a575a06688d810e23d9e6da98f645f1de5e42ccd125915"} Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.837564 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.869259 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-zcxkk" podStartSLOduration=2.314174063 
podStartE2EDuration="5.869240827s" podCreationTimestamp="2025-10-09 09:14:59 +0000 UTC" firstStartedPulling="2025-10-09 09:15:00.159745432 +0000 UTC m=+623.649853829" lastFinishedPulling="2025-10-09 09:15:03.714812196 +0000 UTC m=+627.204920593" observedRunningTime="2025-10-09 09:15:04.851679989 +0000 UTC m=+628.341788386" watchObservedRunningTime="2025-10-09 09:15:04.869240827 +0000 UTC m=+628.359349225" Oct 09 09:15:04 crc kubenswrapper[4710]: I1009 09:15:04.869760 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" podStartSLOduration=2.779940919 podStartE2EDuration="5.869755588s" podCreationTimestamp="2025-10-09 09:14:59 +0000 UTC" firstStartedPulling="2025-10-09 09:15:00.641456348 +0000 UTC m=+624.131564745" lastFinishedPulling="2025-10-09 09:15:03.731271018 +0000 UTC m=+627.221379414" observedRunningTime="2025-10-09 09:15:04.868351179 +0000 UTC m=+628.358459577" watchObservedRunningTime="2025-10-09 09:15:04.869755588 +0000 UTC m=+628.359863985" Oct 09 09:15:06 crc kubenswrapper[4710]: I1009 09:15:06.832849 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-ptzlb" podStartSLOduration=4.758839744 podStartE2EDuration="7.832832404s" podCreationTimestamp="2025-10-09 09:14:59 +0000 UTC" firstStartedPulling="2025-10-09 09:15:00.632288092 +0000 UTC m=+624.122396489" lastFinishedPulling="2025-10-09 09:15:03.706280752 +0000 UTC m=+627.196389149" observedRunningTime="2025-10-09 09:15:04.891633025 +0000 UTC m=+628.381741422" watchObservedRunningTime="2025-10-09 09:15:06.832832404 +0000 UTC m=+630.322940801" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.143171 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-zcxkk" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.435980 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.436030 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.440524 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.876381 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" event={"ID":"0899a565-e72a-498c-9071-7b05ccb027bd","Type":"ContainerStarted","Data":"6110cc8bb6db2de7f6bd2d4b9431ebe6af9bd95cd069a533676c85dbc396bfe2"} Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.880279 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6cf6c9dd57-mp99p" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.896182 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-bcx7j" podStartSLOduration=2.318252542 podStartE2EDuration="11.89615446s" podCreationTimestamp="2025-10-09 09:14:59 +0000 UTC" firstStartedPulling="2025-10-09 09:15:00.554836657 +0000 UTC m=+624.044945054" lastFinishedPulling="2025-10-09 09:15:10.132738575 +0000 UTC m=+633.622846972" observedRunningTime="2025-10-09 09:15:10.892991487 +0000 UTC m=+634.383099884" watchObservedRunningTime="2025-10-09 09:15:10.89615446 +0000 UTC 
m=+634.386262857" Oct 09 09:15:10 crc kubenswrapper[4710]: I1009 09:15:10.941337 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:15:20 crc kubenswrapper[4710]: I1009 09:15:20.056289 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-sfv4r" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.227122 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7"] Oct 09 09:15:31 crc kubenswrapper[4710]: E1009 09:15:31.227917 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94fa45a8-fffa-4fb1-bb72-8a21c2825e96" containerName="collect-profiles" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.227934 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="94fa45a8-fffa-4fb1-bb72-8a21c2825e96" containerName="collect-profiles" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.228033 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="94fa45a8-fffa-4fb1-bb72-8a21c2825e96" containerName="collect-profiles" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.228789 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.231639 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.237112 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7"] Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.264423 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbd7d\" (UniqueName: \"kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.264554 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.264602 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.366078 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbd7d\" (UniqueName: \"kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d\") pod 
\"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.366146 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.366175 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.366696 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.366767 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.392253 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbd7d\" (UniqueName: \"kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.549252 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.707529 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7"] Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.993618 4710 generic.go:334] "Generic (PLEG): container finished" podID="372d2b61-0177-4956-bcab-23bee5bfd490" containerID="d2cecd21ea73a87286e4543a272bbed7b8a479e56b1773023f8fcae297febb1a" exitCode=0 Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.993899 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" event={"ID":"372d2b61-0177-4956-bcab-23bee5bfd490","Type":"ContainerDied","Data":"d2cecd21ea73a87286e4543a272bbed7b8a479e56b1773023f8fcae297febb1a"} Oct 09 09:15:31 crc kubenswrapper[4710]: I1009 09:15:31.993980 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" event={"ID":"372d2b61-0177-4956-bcab-23bee5bfd490","Type":"ContainerStarted","Data":"9b6ff948c1e5b1ec3b210dd4684b6648c211360526e9a02c8c7834517c5ab82f"} Oct 09 09:15:34 crc kubenswrapper[4710]: I1009 09:15:34.007577 4710 generic.go:334] "Generic (PLEG): container finished" podID="372d2b61-0177-4956-bcab-23bee5bfd490" containerID="50a6974f940e0b195bd954f31302fd5435f3d5e4fb67cefcfd52aa75b87d31d3" exitCode=0 Oct 09 09:15:34 crc kubenswrapper[4710]: I1009 09:15:34.007699 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" event={"ID":"372d2b61-0177-4956-bcab-23bee5bfd490","Type":"ContainerDied","Data":"50a6974f940e0b195bd954f31302fd5435f3d5e4fb67cefcfd52aa75b87d31d3"} Oct 09 09:15:35 crc kubenswrapper[4710]: I1009 09:15:35.017101 4710 generic.go:334] "Generic (PLEG): container finished" podID="372d2b61-0177-4956-bcab-23bee5bfd490" containerID="a81e0d9e6e1d30a754a5239fb4401edbfa4404347fa4290e0551ff11825b7aee" exitCode=0 Oct 09 09:15:35 crc kubenswrapper[4710]: I1009 09:15:35.017145 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" event={"ID":"372d2b61-0177-4956-bcab-23bee5bfd490","Type":"ContainerDied","Data":"a81e0d9e6e1d30a754a5239fb4401edbfa4404347fa4290e0551ff11825b7aee"} Oct 09 09:15:35 crc kubenswrapper[4710]: I1009 09:15:35.986805 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-5q44l" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" containerName="console" containerID="cri-o://429a5b39b22217b58be51cffc574ec318f2dd0786dc7c00312f3de7edd8af5a7" gracePeriod=15 Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.245280 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.299651 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-5q44l_0ee99015-bffc-4ffb-a91c-f941cd33acd7/console/0.log" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.299731 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.433319 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbd7d\" (UniqueName: \"kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d\") pod \"372d2b61-0177-4956-bcab-23bee5bfd490\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.433376 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.433423 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.433500 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.433523 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434686 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util\") pod \"372d2b61-0177-4956-bcab-23bee5bfd490\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434327 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config" (OuterVolumeSpecName: "console-config") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434736 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9bxg\" (UniqueName: \"kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434373 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434797 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle\") pod \"372d2b61-0177-4956-bcab-23bee5bfd490\" (UID: \"372d2b61-0177-4956-bcab-23bee5bfd490\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.434907 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435072 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle\") pod \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\" (UID: \"0ee99015-bffc-4ffb-a91c-f941cd33acd7\") " Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435216 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca" (OuterVolumeSpecName: "service-ca") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435400 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435641 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle" (OuterVolumeSpecName: "bundle") pod "372d2b61-0177-4956-bcab-23bee5bfd490" (UID: "372d2b61-0177-4956-bcab-23bee5bfd490"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435647 4710 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435710 4710 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435725 4710 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.435735 4710 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0ee99015-bffc-4ffb-a91c-f941cd33acd7-service-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.440037 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.440080 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d" (OuterVolumeSpecName: "kube-api-access-lbd7d") pod "372d2b61-0177-4956-bcab-23bee5bfd490" (UID: "372d2b61-0177-4956-bcab-23bee5bfd490"). InnerVolumeSpecName "kube-api-access-lbd7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.440486 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg" (OuterVolumeSpecName: "kube-api-access-m9bxg") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "kube-api-access-m9bxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.440693 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "0ee99015-bffc-4ffb-a91c-f941cd33acd7" (UID: "0ee99015-bffc-4ffb-a91c-f941cd33acd7"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.447374 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util" (OuterVolumeSpecName: "util") pod "372d2b61-0177-4956-bcab-23bee5bfd490" (UID: "372d2b61-0177-4956-bcab-23bee5bfd490"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536853 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbd7d\" (UniqueName: \"kubernetes.io/projected/372d2b61-0177-4956-bcab-23bee5bfd490-kube-api-access-lbd7d\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536892 4710 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536908 4710 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/0ee99015-bffc-4ffb-a91c-f941cd33acd7-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536920 4710 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-util\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536933 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9bxg\" (UniqueName: \"kubernetes.io/projected/0ee99015-bffc-4ffb-a91c-f941cd33acd7-kube-api-access-m9bxg\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.536946 4710 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/372d2b61-0177-4956-bcab-23bee5bfd490-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:15:36 crc kubenswrapper[4710]: I1009 09:15:36.993778 4710 scope.go:117] "RemoveContainer" containerID="429a5b39b22217b58be51cffc574ec318f2dd0786dc7c00312f3de7edd8af5a7" Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.027013 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5q44l" event={"ID":"0ee99015-bffc-4ffb-a91c-f941cd33acd7","Type":"ContainerDied","Data":"429a5b39b22217b58be51cffc574ec318f2dd0786dc7c00312f3de7edd8af5a7"} Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.027070 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-5q44l" event={"ID":"0ee99015-bffc-4ffb-a91c-f941cd33acd7","Type":"ContainerDied","Data":"3a68aeefe4a54f081c5eeb9d0a96928612b7dff0b07e1f34fa935d4753f56a37"} Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.028924 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-5q44l" Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.029601 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.029848 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7" event={"ID":"372d2b61-0177-4956-bcab-23bee5bfd490","Type":"ContainerDied","Data":"9b6ff948c1e5b1ec3b210dd4684b6648c211360526e9a02c8c7834517c5ab82f"} Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.029895 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b6ff948c1e5b1ec3b210dd4684b6648c211360526e9a02c8c7834517c5ab82f" Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.062415 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:15:37 crc kubenswrapper[4710]: I1009 09:15:37.067069 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-5q44l"] Oct 09 09:15:38 crc kubenswrapper[4710]: I1009 09:15:38.820685 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" path="/var/lib/kubelet/pods/0ee99015-bffc-4ffb-a91c-f941cd33acd7/volumes" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.915734 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n"] Oct 09 09:15:46 crc kubenswrapper[4710]: E1009 09:15:46.916335 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="extract" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916347 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="extract" Oct 09 09:15:46 crc kubenswrapper[4710]: E1009 09:15:46.916365 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" containerName="console" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916370 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" containerName="console" Oct 09 09:15:46 crc kubenswrapper[4710]: E1009 09:15:46.916376 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="util" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916381 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="util" Oct 09 09:15:46 crc kubenswrapper[4710]: E1009 09:15:46.916390 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="pull" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916395 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="pull" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916490 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="372d2b61-0177-4956-bcab-23bee5bfd490" containerName="extract" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916504 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ee99015-bffc-4ffb-a91c-f941cd33acd7" containerName="console" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.916825 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.919064 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.919289 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-w7zwx" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.919395 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.919523 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.919623 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 09 09:15:46 crc kubenswrapper[4710]: I1009 09:15:46.931520 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n"] Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.061277 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-webhook-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.061563 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-apiservice-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.061638 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n47p5\" (UniqueName: \"kubernetes.io/projected/95157129-8087-4ba4-9b97-980dc6f6d88d-kube-api-access-n47p5\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.162407 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-webhook-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.162479 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-apiservice-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.162512 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n47p5\" (UniqueName: \"kubernetes.io/projected/95157129-8087-4ba4-9b97-980dc6f6d88d-kube-api-access-n47p5\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.169111 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-webhook-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.169530 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/95157129-8087-4ba4-9b97-980dc6f6d88d-apiservice-cert\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.200025 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n47p5\" (UniqueName: \"kubernetes.io/projected/95157129-8087-4ba4-9b97-980dc6f6d88d-kube-api-access-n47p5\") pod \"metallb-operator-controller-manager-7cb7c7d4ff-szp6n\" (UID: \"95157129-8087-4ba4-9b97-980dc6f6d88d\") " pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.232110 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.301139 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b"] Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.302224 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.304867 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-n7p6z" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.304929 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.306685 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.364718 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b"] Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.465886 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5l56\" (UniqueName: \"kubernetes.io/projected/89f2b083-5809-4a5b-9c55-75bb2c0807a8-kube-api-access-l5l56\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.465960 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-webhook-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.465994 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-apiservice-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.549330 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n"] Oct 09 09:15:47 crc kubenswrapper[4710]: W1009 09:15:47.555275 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod95157129_8087_4ba4_9b97_980dc6f6d88d.slice/crio-40d9efa7893a2a40bc6318816dc35d593b8aea6a81fc5978c0c2965c44ad7dd1 WatchSource:0}: Error finding container 40d9efa7893a2a40bc6318816dc35d593b8aea6a81fc5978c0c2965c44ad7dd1: Status 404 returned error can't find the container with id 40d9efa7893a2a40bc6318816dc35d593b8aea6a81fc5978c0c2965c44ad7dd1 Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.566787 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-webhook-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.566834 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-apiservice-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.566879 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5l56\" (UniqueName: \"kubernetes.io/projected/89f2b083-5809-4a5b-9c55-75bb2c0807a8-kube-api-access-l5l56\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.571542 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-webhook-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.571552 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/89f2b083-5809-4a5b-9c55-75bb2c0807a8-apiservice-cert\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.580028 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5l56\" (UniqueName: \"kubernetes.io/projected/89f2b083-5809-4a5b-9c55-75bb2c0807a8-kube-api-access-l5l56\") pod \"metallb-operator-webhook-server-86bdd9545f-gp42b\" (UID: \"89f2b083-5809-4a5b-9c55-75bb2c0807a8\") " pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.618402 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:15:47 crc kubenswrapper[4710]: I1009 09:15:47.808166 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b"] Oct 09 09:15:47 crc kubenswrapper[4710]: W1009 09:15:47.817494 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89f2b083_5809_4a5b_9c55_75bb2c0807a8.slice/crio-6ae291714d780fa3e16543ef865009376f7b2290bc232ab082080460201b0d5b WatchSource:0}: Error finding container 6ae291714d780fa3e16543ef865009376f7b2290bc232ab082080460201b0d5b: Status 404 returned error can't find the container with id 6ae291714d780fa3e16543ef865009376f7b2290bc232ab082080460201b0d5b Oct 09 09:15:48 crc kubenswrapper[4710]: I1009 09:15:48.085318 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" event={"ID":"89f2b083-5809-4a5b-9c55-75bb2c0807a8","Type":"ContainerStarted","Data":"6ae291714d780fa3e16543ef865009376f7b2290bc232ab082080460201b0d5b"} Oct 09 09:15:48 crc kubenswrapper[4710]: I1009 09:15:48.086707 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" event={"ID":"95157129-8087-4ba4-9b97-980dc6f6d88d","Type":"ContainerStarted","Data":"40d9efa7893a2a40bc6318816dc35d593b8aea6a81fc5978c0c2965c44ad7dd1"} Oct 09 09:15:51 crc kubenswrapper[4710]: I1009 09:15:51.113265 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" event={"ID":"95157129-8087-4ba4-9b97-980dc6f6d88d","Type":"ContainerStarted","Data":"0f4740d79a807e6fe53c26c7ddc87145e16a8c08f192cc6966b0637a9974d60d"} Oct 09 09:15:51 crc kubenswrapper[4710]: I1009 09:15:51.114499 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:15:51 crc kubenswrapper[4710]: I1009 09:15:51.156413 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" podStartSLOduration=2.342404008 podStartE2EDuration="5.156395499s" podCreationTimestamp="2025-10-09 09:15:46 +0000 UTC" firstStartedPulling="2025-10-09 09:15:47.558943401 +0000 UTC m=+671.049051798" lastFinishedPulling="2025-10-09 09:15:50.372934892 +0000 UTC m=+673.863043289" observedRunningTime="2025-10-09 09:15:51.146169317 +0000 UTC m=+674.636277714" watchObservedRunningTime="2025-10-09 09:15:51.156395499 +0000 UTC m=+674.646503896" Oct 09 09:15:53 crc kubenswrapper[4710]: I1009 09:15:53.127873 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" event={"ID":"89f2b083-5809-4a5b-9c55-75bb2c0807a8","Type":"ContainerStarted","Data":"9682b781dbb7492166532eb0b726aafbf506197b3c1570ed0df1facb503f373b"} Oct 09 09:15:53 crc kubenswrapper[4710]: I1009 09:15:53.144496 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" podStartSLOduration=1.711712989 podStartE2EDuration="6.14447486s" podCreationTimestamp="2025-10-09 09:15:47 +0000 UTC" firstStartedPulling="2025-10-09 09:15:47.821145303 +0000 UTC m=+671.311253700" lastFinishedPulling="2025-10-09 09:15:52.253907173 +0000 UTC m=+675.744015571" 
observedRunningTime="2025-10-09 09:15:53.142403043 +0000 UTC m=+676.632511440" watchObservedRunningTime="2025-10-09 09:15:53.14447486 +0000 UTC m=+676.634583256" Oct 09 09:15:54 crc kubenswrapper[4710]: I1009 09:15:54.132702 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:16:05 crc kubenswrapper[4710]: I1009 09:16:05.546189 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:16:05 crc kubenswrapper[4710]: I1009 09:16:05.546763 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:16:07 crc kubenswrapper[4710]: I1009 09:16:07.624383 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-86bdd9545f-gp42b" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.239760 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7cb7c7d4ff-szp6n" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.799622 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-mqt2m"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.801401 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.805977 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.805982 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.806106 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-psqkh" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.831025 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.831702 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.833175 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.847506 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.901207 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-6wvbb"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.902461 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-6wvbb" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.908148 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.908152 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 09 09:16:27 crc kubenswrapper[4710]: W1009 09:16:27.908185 4710 reflector.go:561] object-"metallb-system"/"speaker-dockercfg-9xjwg": failed to list *v1.Secret: secrets "speaker-dockercfg-9xjwg" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Oct 09 09:16:27 crc kubenswrapper[4710]: E1009 09:16:27.908307 4710 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"speaker-dockercfg-9xjwg\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"speaker-dockercfg-9xjwg\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.908610 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.911750 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-fjp44"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.912740 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.916059 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.938564 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-fjp44"] Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939320 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aab75afc-7b04-4a39-afd5-3109512334d6-metrics-certs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939362 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgtl9\" (UniqueName: \"kubernetes.io/projected/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-kube-api-access-dgtl9\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939460 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-sockets\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939488 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-metrics\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939548 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npfgs\" (UniqueName: \"kubernetes.io/projected/aab75afc-7b04-4a39-afd5-3109512334d6-kube-api-access-npfgs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939579 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aab75afc-7b04-4a39-afd5-3109512334d6-frr-startup\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939622 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-conf\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939658 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:27 crc kubenswrapper[4710]: I1009 09:16:27.939681 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-reloader\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041393 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-conf\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041455 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041478 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-reloader\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041496 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aab75afc-7b04-4a39-afd5-3109512334d6-metrics-certs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " 
pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041512 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgtl9\" (UniqueName: \"kubernetes.io/projected/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-kube-api-access-dgtl9\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041535 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcjhv\" (UniqueName: \"kubernetes.io/projected/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-kube-api-access-mcjhv\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041561 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-metrics-certs\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041587 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-sockets\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041606 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041621 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-metrics\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041639 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metallb-excludel2\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041657 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metrics-certs\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041673 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-cert\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 
09:16:28.041694 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx7rz\" (UniqueName: \"kubernetes.io/projected/74b53642-04a4-4331-806b-c9f84d190746-kube-api-access-mx7rz\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041718 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npfgs\" (UniqueName: \"kubernetes.io/projected/aab75afc-7b04-4a39-afd5-3109512334d6-kube-api-access-npfgs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041768 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aab75afc-7b04-4a39-afd5-3109512334d6-frr-startup\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041851 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-conf\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.041974 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-frr-sockets\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.042284 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-reloader\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.042599 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/aab75afc-7b04-4a39-afd5-3109512334d6-metrics\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.042616 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/aab75afc-7b04-4a39-afd5-3109512334d6-frr-startup\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.049860 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/aab75afc-7b04-4a39-afd5-3109512334d6-metrics-certs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.050913 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-cert\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " 
pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.059179 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgtl9\" (UniqueName: \"kubernetes.io/projected/f03c49fa-bc71-4bac-b0bb-e25876b0cef7-kube-api-access-dgtl9\") pod \"frr-k8s-webhook-server-64bf5d555-t2fhd\" (UID: \"f03c49fa-bc71-4bac-b0bb-e25876b0cef7\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.067620 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npfgs\" (UniqueName: \"kubernetes.io/projected/aab75afc-7b04-4a39-afd5-3109512334d6-kube-api-access-npfgs\") pod \"frr-k8s-mqt2m\" (UID: \"aab75afc-7b04-4a39-afd5-3109512334d6\") " pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.114654 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.142466 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: E1009 09:16:28.142561 4710 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.142670 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metallb-excludel2\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: E1009 09:16:28.142728 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist podName:f1dc3816-2a79-4e41-8337-bc61c3bbafc8 nodeName:}" failed. No retries permitted until 2025-10-09 09:16:28.642693116 +0000 UTC m=+712.132801513 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist") pod "speaker-6wvbb" (UID: "f1dc3816-2a79-4e41-8337-bc61c3bbafc8") : secret "metallb-memberlist" not found Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.142931 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metrics-certs\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.143030 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-cert\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.143125 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx7rz\" (UniqueName: \"kubernetes.io/projected/74b53642-04a4-4331-806b-c9f84d190746-kube-api-access-mx7rz\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.143298 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcjhv\" (UniqueName: \"kubernetes.io/projected/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-kube-api-access-mcjhv\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.143388 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-metrics-certs\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.143420 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metallb-excludel2\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.146291 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.147356 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-cert\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.156950 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-metrics-certs\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.157020 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/74b53642-04a4-4331-806b-c9f84d190746-metrics-certs\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.159500 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx7rz\" (UniqueName: \"kubernetes.io/projected/74b53642-04a4-4331-806b-c9f84d190746-kube-api-access-mx7rz\") pod \"controller-68d546b9d8-fjp44\" (UID: \"74b53642-04a4-4331-806b-c9f84d190746\") " pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.160165 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcjhv\" (UniqueName: \"kubernetes.io/projected/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-kube-api-access-mcjhv\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.228340 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.319451 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"6d9ee256f9aeed99171b34979b55a7bf685a279ac6df3e81607b44de62c0a5dc"} Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.534186 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd"] Oct 09 09:16:28 crc kubenswrapper[4710]: W1009 09:16:28.541569 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf03c49fa_bc71_4bac_b0bb_e25876b0cef7.slice/crio-e18301394756fd05bebfae7d4873f994d8f32f21a12eac2deac95f092239fd69 WatchSource:0}: Error finding container e18301394756fd05bebfae7d4873f994d8f32f21a12eac2deac95f092239fd69: Status 404 returned error can't find the container with id e18301394756fd05bebfae7d4873f994d8f32f21a12eac2deac95f092239fd69 Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.621669 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-fjp44"] Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.649032 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:28 crc kubenswrapper[4710]: E1009 09:16:28.649609 4710 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 09 09:16:28 crc kubenswrapper[4710]: E1009 09:16:28.649672 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist podName:f1dc3816-2a79-4e41-8337-bc61c3bbafc8 nodeName:}" failed. No retries permitted until 2025-10-09 09:16:29.649655453 +0000 UTC m=+713.139763850 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist") pod "speaker-6wvbb" (UID: "f1dc3816-2a79-4e41-8337-bc61c3bbafc8") : secret "metallb-memberlist" not found Oct 09 09:16:28 crc kubenswrapper[4710]: I1009 09:16:28.882674 4710 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-9xjwg" Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.327712 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fjp44" event={"ID":"74b53642-04a4-4331-806b-c9f84d190746","Type":"ContainerStarted","Data":"2600099ac6896273c71ad7aed6641a0b367bbd66b32db51d1c34dcaac440bb78"} Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.327789 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fjp44" event={"ID":"74b53642-04a4-4331-806b-c9f84d190746","Type":"ContainerStarted","Data":"0e6e7c8c8ed014b56a5d628fd0871f7409204625674f36045b33d72f00d63965"} Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.328047 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.328073 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-fjp44" event={"ID":"74b53642-04a4-4331-806b-c9f84d190746","Type":"ContainerStarted","Data":"fa13877221aee14f1f07c5984ecc5dc40261f8be92e70b4e2804e162a2e3fce9"} Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.329537 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" event={"ID":"f03c49fa-bc71-4bac-b0bb-e25876b0cef7","Type":"ContainerStarted","Data":"e18301394756fd05bebfae7d4873f994d8f32f21a12eac2deac95f092239fd69"} Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.346810 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-fjp44" podStartSLOduration=2.346790807 podStartE2EDuration="2.346790807s" podCreationTimestamp="2025-10-09 09:16:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:16:29.344046221 +0000 UTC m=+712.834154618" watchObservedRunningTime="2025-10-09 09:16:29.346790807 +0000 UTC m=+712.836899203" Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.662761 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.668424 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/f1dc3816-2a79-4e41-8337-bc61c3bbafc8-memberlist\") pod \"speaker-6wvbb\" (UID: \"f1dc3816-2a79-4e41-8337-bc61c3bbafc8\") " pod="metallb-system/speaker-6wvbb" Oct 09 09:16:29 crc kubenswrapper[4710]: I1009 09:16:29.721751 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-6wvbb" Oct 09 09:16:29 crc kubenswrapper[4710]: W1009 09:16:29.741892 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1dc3816_2a79_4e41_8337_bc61c3bbafc8.slice/crio-9cb9d9b82eca85811107e3cd37b15809d43cfefaa761acab7c4202ff34d781dd WatchSource:0}: Error finding container 9cb9d9b82eca85811107e3cd37b15809d43cfefaa761acab7c4202ff34d781dd: Status 404 returned error can't find the container with id 9cb9d9b82eca85811107e3cd37b15809d43cfefaa761acab7c4202ff34d781dd Oct 09 09:16:30 crc kubenswrapper[4710]: I1009 09:16:30.337402 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wvbb" event={"ID":"f1dc3816-2a79-4e41-8337-bc61c3bbafc8","Type":"ContainerStarted","Data":"f063e9dbff1c88967437ea7e9d3e40b18667c585896c5f66737384a7ed3a3598"} Oct 09 09:16:30 crc kubenswrapper[4710]: I1009 09:16:30.337699 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wvbb" event={"ID":"f1dc3816-2a79-4e41-8337-bc61c3bbafc8","Type":"ContainerStarted","Data":"b594a7da1445751eded28aa383d6cd79d02f01f8d91df207c1893ab657eb4594"} Oct 09 09:16:30 crc kubenswrapper[4710]: I1009 09:16:30.337712 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-6wvbb" event={"ID":"f1dc3816-2a79-4e41-8337-bc61c3bbafc8","Type":"ContainerStarted","Data":"9cb9d9b82eca85811107e3cd37b15809d43cfefaa761acab7c4202ff34d781dd"} Oct 09 09:16:30 crc kubenswrapper[4710]: I1009 09:16:30.337916 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-6wvbb" Oct 09 09:16:30 crc kubenswrapper[4710]: I1009 09:16:30.360795 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-6wvbb" podStartSLOduration=3.360782087 podStartE2EDuration="3.360782087s" podCreationTimestamp="2025-10-09 09:16:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:16:30.358389675 +0000 UTC m=+713.848498073" watchObservedRunningTime="2025-10-09 09:16:30.360782087 +0000 UTC m=+713.850890483" Oct 09 09:16:35 crc kubenswrapper[4710]: I1009 09:16:35.545706 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:16:35 crc kubenswrapper[4710]: I1009 09:16:35.546636 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:16:36 crc kubenswrapper[4710]: I1009 09:16:36.383141 4710 generic.go:334] "Generic (PLEG): container finished" podID="aab75afc-7b04-4a39-afd5-3109512334d6" containerID="132c6386561e2a9e8d66b1722fb17c134b1f5e6c05689be2dd5c9f2f4d969863" exitCode=0 Oct 09 09:16:36 crc kubenswrapper[4710]: I1009 09:16:36.383536 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerDied","Data":"132c6386561e2a9e8d66b1722fb17c134b1f5e6c05689be2dd5c9f2f4d969863"} Oct 09 
09:16:36 crc kubenswrapper[4710]: I1009 09:16:36.389538 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" event={"ID":"f03c49fa-bc71-4bac-b0bb-e25876b0cef7","Type":"ContainerStarted","Data":"8d6cd0ab0a87badf55032f20ea954cee464b73765da7ffa64190a737d64877ec"} Oct 09 09:16:36 crc kubenswrapper[4710]: I1009 09:16:36.389902 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:36 crc kubenswrapper[4710]: I1009 09:16:36.426026 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" podStartSLOduration=2.239636487 podStartE2EDuration="9.42600789s" podCreationTimestamp="2025-10-09 09:16:27 +0000 UTC" firstStartedPulling="2025-10-09 09:16:28.543020725 +0000 UTC m=+712.033129122" lastFinishedPulling="2025-10-09 09:16:35.729392127 +0000 UTC m=+719.219500525" observedRunningTime="2025-10-09 09:16:36.422418051 +0000 UTC m=+719.912526448" watchObservedRunningTime="2025-10-09 09:16:36.42600789 +0000 UTC m=+719.916116288" Oct 09 09:16:37 crc kubenswrapper[4710]: I1009 09:16:37.397223 4710 generic.go:334] "Generic (PLEG): container finished" podID="aab75afc-7b04-4a39-afd5-3109512334d6" containerID="ede8a20c9ada3bb398cc04a465952178017d97ef3bc4379b724c12592e965a29" exitCode=0 Oct 09 09:16:37 crc kubenswrapper[4710]: I1009 09:16:37.397304 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerDied","Data":"ede8a20c9ada3bb398cc04a465952178017d97ef3bc4379b724c12592e965a29"} Oct 09 09:16:38 crc kubenswrapper[4710]: I1009 09:16:38.235493 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-fjp44" Oct 09 09:16:38 crc kubenswrapper[4710]: I1009 09:16:38.404985 4710 generic.go:334] "Generic (PLEG): container finished" podID="aab75afc-7b04-4a39-afd5-3109512334d6" containerID="c155c191b245bb89c4580b94576c175faad59572622b620ee03895cb12fd9ac5" exitCode=0 Oct 09 09:16:38 crc kubenswrapper[4710]: I1009 09:16:38.405032 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerDied","Data":"c155c191b245bb89c4580b94576c175faad59572622b620ee03895cb12fd9ac5"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418058 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"01e8f64a856dfac4af3b4830ed7af801423e270e1f12845a5736f97312960c10"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418567 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"a39e3f086319c6a05ce0c4c82a1e27c6da5b9e85735216aca6ac8ff734ecdc1c"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418584 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"616eea788fde652de50010cb31b7d74f2cbaf89f1f80d687f1273188769371b7"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418596 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" 
event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"b5452333fe7fba7151d9b79a412c4086bfe6534057aaee52fd7b10c7d6ece19a"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418605 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"40f5890a76227e784a2cd1a726b2f79439de4883604f43416bd80e78080b1294"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.418616 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-mqt2m" event={"ID":"aab75afc-7b04-4a39-afd5-3109512334d6","Type":"ContainerStarted","Data":"213efe1b121523c88497a4eebf063e0cac36756f81c5c97c87b106210a4de261"} Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.419797 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.442104 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-mqt2m" podStartSLOduration=4.987839456 podStartE2EDuration="12.442081037s" podCreationTimestamp="2025-10-09 09:16:27 +0000 UTC" firstStartedPulling="2025-10-09 09:16:28.256857512 +0000 UTC m=+711.746965909" lastFinishedPulling="2025-10-09 09:16:35.711099094 +0000 UTC m=+719.201207490" observedRunningTime="2025-10-09 09:16:39.440484947 +0000 UTC m=+722.930593344" watchObservedRunningTime="2025-10-09 09:16:39.442081037 +0000 UTC m=+722.932189434" Oct 09 09:16:39 crc kubenswrapper[4710]: I1009 09:16:39.725723 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-6wvbb" Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.961113 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.962030 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.963643 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-9qsmw" Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.965151 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.965397 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 09 09:16:41 crc kubenswrapper[4710]: I1009 09:16:41.978924 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:42 crc kubenswrapper[4710]: I1009 09:16:42.081481 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz4nh\" (UniqueName: \"kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh\") pod \"openstack-operator-index-mszw9\" (UID: \"c33edf3e-64f3-40d4-8182-a518fd7d78fa\") " pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:42 crc kubenswrapper[4710]: I1009 09:16:42.182528 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz4nh\" (UniqueName: \"kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh\") pod \"openstack-operator-index-mszw9\" (UID: \"c33edf3e-64f3-40d4-8182-a518fd7d78fa\") " pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:42 crc kubenswrapper[4710]: I1009 09:16:42.208113 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz4nh\" (UniqueName: \"kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh\") pod \"openstack-operator-index-mszw9\" (UID: \"c33edf3e-64f3-40d4-8182-a518fd7d78fa\") " pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:42 crc kubenswrapper[4710]: I1009 09:16:42.275389 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:42 crc kubenswrapper[4710]: I1009 09:16:42.676837 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:42 crc kubenswrapper[4710]: W1009 09:16:42.682353 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc33edf3e_64f3_40d4_8182_a518fd7d78fa.slice/crio-e63b08826effafcb66218cf463912c6dd3f8f30e948728f0450a5b1c4df90d4e WatchSource:0}: Error finding container e63b08826effafcb66218cf463912c6dd3f8f30e948728f0450a5b1c4df90d4e: Status 404 returned error can't find the container with id e63b08826effafcb66218cf463912c6dd3f8f30e948728f0450a5b1c4df90d4e Oct 09 09:16:43 crc kubenswrapper[4710]: I1009 09:16:43.115193 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:43 crc kubenswrapper[4710]: I1009 09:16:43.145371 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:43 crc kubenswrapper[4710]: I1009 09:16:43.445519 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mszw9" event={"ID":"c33edf3e-64f3-40d4-8182-a518fd7d78fa","Type":"ContainerStarted","Data":"e63b08826effafcb66218cf463912c6dd3f8f30e948728f0450a5b1c4df90d4e"} Oct 09 09:16:44 crc kubenswrapper[4710]: I1009 09:16:44.454788 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mszw9" event={"ID":"c33edf3e-64f3-40d4-8182-a518fd7d78fa","Type":"ContainerStarted","Data":"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef"} Oct 09 09:16:44 crc kubenswrapper[4710]: I1009 09:16:44.468414 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mszw9" podStartSLOduration=1.8795119260000002 podStartE2EDuration="3.468398569s" podCreationTimestamp="2025-10-09 09:16:41 +0000 UTC" firstStartedPulling="2025-10-09 09:16:42.684412135 +0000 UTC m=+726.174520533" lastFinishedPulling="2025-10-09 09:16:44.273298779 +0000 UTC m=+727.763407176" observedRunningTime="2025-10-09 09:16:44.467679122 +0000 UTC m=+727.957787519" watchObservedRunningTime="2025-10-09 09:16:44.468398569 +0000 UTC m=+727.958506965" Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.135401 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.741926 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-nlptw"] Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.743690 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.751873 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nlptw"] Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.848481 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhwd7\" (UniqueName: \"kubernetes.io/projected/ea70cec4-b9bc-48b8-8871-034a6d5b392a-kube-api-access-mhwd7\") pod \"openstack-operator-index-nlptw\" (UID: \"ea70cec4-b9bc-48b8-8871-034a6d5b392a\") " pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.949563 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhwd7\" (UniqueName: \"kubernetes.io/projected/ea70cec4-b9bc-48b8-8871-034a6d5b392a-kube-api-access-mhwd7\") pod \"openstack-operator-index-nlptw\" (UID: \"ea70cec4-b9bc-48b8-8871-034a6d5b392a\") " pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:45 crc kubenswrapper[4710]: I1009 09:16:45.968814 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhwd7\" (UniqueName: \"kubernetes.io/projected/ea70cec4-b9bc-48b8-8871-034a6d5b392a-kube-api-access-mhwd7\") pod \"openstack-operator-index-nlptw\" (UID: \"ea70cec4-b9bc-48b8-8871-034a6d5b392a\") " pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.057392 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.434817 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-nlptw"] Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.464630 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nlptw" event={"ID":"ea70cec4-b9bc-48b8-8871-034a6d5b392a","Type":"ContainerStarted","Data":"eaef73902bcc07ba063b9ddbede8e59601140341cd236658ff2c97e5a55cc76d"} Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.464754 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-mszw9" podUID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" containerName="registry-server" containerID="cri-o://ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef" gracePeriod=2 Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.757166 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.864505 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz4nh\" (UniqueName: \"kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh\") pod \"c33edf3e-64f3-40d4-8182-a518fd7d78fa\" (UID: \"c33edf3e-64f3-40d4-8182-a518fd7d78fa\") " Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.870417 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh" (OuterVolumeSpecName: "kube-api-access-xz4nh") pod "c33edf3e-64f3-40d4-8182-a518fd7d78fa" (UID: "c33edf3e-64f3-40d4-8182-a518fd7d78fa"). 
InnerVolumeSpecName "kube-api-access-xz4nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:16:46 crc kubenswrapper[4710]: I1009 09:16:46.966451 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz4nh\" (UniqueName: \"kubernetes.io/projected/c33edf3e-64f3-40d4-8182-a518fd7d78fa-kube-api-access-xz4nh\") on node \"crc\" DevicePath \"\"" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.471238 4710 generic.go:334] "Generic (PLEG): container finished" podID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" containerID="ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef" exitCode=0 Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.471329 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-mszw9" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.471669 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mszw9" event={"ID":"c33edf3e-64f3-40d4-8182-a518fd7d78fa","Type":"ContainerDied","Data":"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef"} Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.471698 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mszw9" event={"ID":"c33edf3e-64f3-40d4-8182-a518fd7d78fa","Type":"ContainerDied","Data":"e63b08826effafcb66218cf463912c6dd3f8f30e948728f0450a5b1c4df90d4e"} Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.471714 4710 scope.go:117] "RemoveContainer" containerID="ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.473515 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-nlptw" event={"ID":"ea70cec4-b9bc-48b8-8871-034a6d5b392a","Type":"ContainerStarted","Data":"82740284d191d51b6ee82f41f2e4c93c20d79d2ea7d605c0fd7ef698d68a1691"} Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.487654 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-nlptw" podStartSLOduration=1.980066404 podStartE2EDuration="2.487637354s" podCreationTimestamp="2025-10-09 09:16:45 +0000 UTC" firstStartedPulling="2025-10-09 09:16:46.445751542 +0000 UTC m=+729.935859940" lastFinishedPulling="2025-10-09 09:16:46.953322493 +0000 UTC m=+730.443430890" observedRunningTime="2025-10-09 09:16:47.4850204 +0000 UTC m=+730.975128797" watchObservedRunningTime="2025-10-09 09:16:47.487637354 +0000 UTC m=+730.977745750" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.491949 4710 scope.go:117] "RemoveContainer" containerID="ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef" Oct 09 09:16:47 crc kubenswrapper[4710]: E1009 09:16:47.492727 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef\": container with ID starting with ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef not found: ID does not exist" containerID="ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.492806 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef"} err="failed to get container status 
\"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef\": rpc error: code = NotFound desc = could not find container \"ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef\": container with ID starting with ca4f1e667022b04eedf84a51028250caa5cda223a1e43ece95dbe384167c2bef not found: ID does not exist" Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.498742 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:47 crc kubenswrapper[4710]: I1009 09:16:47.502216 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-mszw9"] Oct 09 09:16:48 crc kubenswrapper[4710]: I1009 09:16:48.117950 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-mqt2m" Oct 09 09:16:48 crc kubenswrapper[4710]: I1009 09:16:48.154409 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-t2fhd" Oct 09 09:16:48 crc kubenswrapper[4710]: I1009 09:16:48.822321 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" path="/var/lib/kubelet/pods/c33edf3e-64f3-40d4-8182-a518fd7d78fa/volumes" Oct 09 09:16:56 crc kubenswrapper[4710]: I1009 09:16:56.057951 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:56 crc kubenswrapper[4710]: I1009 09:16:56.058374 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:56 crc kubenswrapper[4710]: I1009 09:16:56.080401 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:56 crc kubenswrapper[4710]: I1009 09:16:56.537476 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-nlptw" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.961933 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt"] Oct 09 09:16:57 crc kubenswrapper[4710]: E1009 09:16:57.962705 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" containerName="registry-server" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.962724 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" containerName="registry-server" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.962819 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c33edf3e-64f3-40d4-8182-a518fd7d78fa" containerName="registry-server" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.963517 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.965073 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-ngf2d" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.969692 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt"] Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.992420 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65l4f\" (UniqueName: \"kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.992497 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:57 crc kubenswrapper[4710]: I1009 09:16:57.992573 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.094035 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.094235 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65l4f\" (UniqueName: \"kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.094339 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.094454 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.094736 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.109703 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65l4f\" (UniqueName: \"kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f\") pod \"184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.276873 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:16:58 crc kubenswrapper[4710]: I1009 09:16:58.630848 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt"] Oct 09 09:16:58 crc kubenswrapper[4710]: W1009 09:16:58.633904 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfbeb551_9915_4071_a67c_5a88443100f1.slice/crio-825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9 WatchSource:0}: Error finding container 825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9: Status 404 returned error can't find the container with id 825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9 Oct 09 09:16:59 crc kubenswrapper[4710]: I1009 09:16:59.534454 4710 generic.go:334] "Generic (PLEG): container finished" podID="cfbeb551-9915-4071-a67c-5a88443100f1" containerID="8fb284eb7c5ede68bfd14c20e9eb9c07b3f41dc9ffac8a344c88dd8cf919b059" exitCode=0 Oct 09 09:16:59 crc kubenswrapper[4710]: I1009 09:16:59.534494 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerDied","Data":"8fb284eb7c5ede68bfd14c20e9eb9c07b3f41dc9ffac8a344c88dd8cf919b059"} Oct 09 09:16:59 crc kubenswrapper[4710]: I1009 09:16:59.534518 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerStarted","Data":"825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9"} Oct 09 09:17:00 crc kubenswrapper[4710]: I1009 09:17:00.539869 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerStarted","Data":"01d4d6fa49adb4361af8366543c994d0b67633cfc005238713cb07bf8cc5dc1f"} Oct 09 09:17:01 crc kubenswrapper[4710]: 
I1009 09:17:01.545363 4710 generic.go:334] "Generic (PLEG): container finished" podID="cfbeb551-9915-4071-a67c-5a88443100f1" containerID="01d4d6fa49adb4361af8366543c994d0b67633cfc005238713cb07bf8cc5dc1f" exitCode=0 Oct 09 09:17:01 crc kubenswrapper[4710]: I1009 09:17:01.545396 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerDied","Data":"01d4d6fa49adb4361af8366543c994d0b67633cfc005238713cb07bf8cc5dc1f"} Oct 09 09:17:02 crc kubenswrapper[4710]: I1009 09:17:02.551704 4710 generic.go:334] "Generic (PLEG): container finished" podID="cfbeb551-9915-4071-a67c-5a88443100f1" containerID="fae417a40291a527dfd1294bb39494b6421c4949895dc344b6227e6d1c4d7a42" exitCode=0 Oct 09 09:17:02 crc kubenswrapper[4710]: I1009 09:17:02.551740 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerDied","Data":"fae417a40291a527dfd1294bb39494b6421c4949895dc344b6227e6d1c4d7a42"} Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.751323 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.850386 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle\") pod \"cfbeb551-9915-4071-a67c-5a88443100f1\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.850458 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65l4f\" (UniqueName: \"kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f\") pod \"cfbeb551-9915-4071-a67c-5a88443100f1\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.850542 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util\") pod \"cfbeb551-9915-4071-a67c-5a88443100f1\" (UID: \"cfbeb551-9915-4071-a67c-5a88443100f1\") " Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.851597 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle" (OuterVolumeSpecName: "bundle") pod "cfbeb551-9915-4071-a67c-5a88443100f1" (UID: "cfbeb551-9915-4071-a67c-5a88443100f1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.857443 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f" (OuterVolumeSpecName: "kube-api-access-65l4f") pod "cfbeb551-9915-4071-a67c-5a88443100f1" (UID: "cfbeb551-9915-4071-a67c-5a88443100f1"). InnerVolumeSpecName "kube-api-access-65l4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.863419 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util" (OuterVolumeSpecName: "util") pod "cfbeb551-9915-4071-a67c-5a88443100f1" (UID: "cfbeb551-9915-4071-a67c-5a88443100f1"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.952528 4710 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-util\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.952549 4710 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cfbeb551-9915-4071-a67c-5a88443100f1-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:03 crc kubenswrapper[4710]: I1009 09:17:03.952558 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65l4f\" (UniqueName: \"kubernetes.io/projected/cfbeb551-9915-4071-a67c-5a88443100f1-kube-api-access-65l4f\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:04 crc kubenswrapper[4710]: I1009 09:17:04.562821 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" event={"ID":"cfbeb551-9915-4071-a67c-5a88443100f1","Type":"ContainerDied","Data":"825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9"} Oct 09 09:17:04 crc kubenswrapper[4710]: I1009 09:17:04.562853 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="825aff9f7d4a87422d248e51f8cae26fb08066c2b45de61778f3cbf978683bd9" Oct 09 09:17:04 crc kubenswrapper[4710]: I1009 09:17:04.562861 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.485829 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.486227 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" podUID="e6867809-73e3-4291-97d4-cb38b0aeae7b" containerName="controller-manager" containerID="cri-o://2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3" gracePeriod=30 Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.545707 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.545756 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.545794 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.546258 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.546311 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d" gracePeriod=600 Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.609568 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.612462 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" podUID="58d085ee-1389-48a6-b185-a036265014d2" containerName="route-controller-manager" containerID="cri-o://c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b" gracePeriod=30 Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.849800 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.871859 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bbk7\" (UniqueName: \"kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7\") pod \"e6867809-73e3-4291-97d4-cb38b0aeae7b\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.871935 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config\") pod \"e6867809-73e3-4291-97d4-cb38b0aeae7b\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.871979 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca\") pod \"e6867809-73e3-4291-97d4-cb38b0aeae7b\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.871992 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles\") pod \"e6867809-73e3-4291-97d4-cb38b0aeae7b\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.872036 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert\") pod \"e6867809-73e3-4291-97d4-cb38b0aeae7b\" (UID: \"e6867809-73e3-4291-97d4-cb38b0aeae7b\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.873423 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config" (OuterVolumeSpecName: "config") pod "e6867809-73e3-4291-97d4-cb38b0aeae7b" (UID: "e6867809-73e3-4291-97d4-cb38b0aeae7b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.873760 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca" (OuterVolumeSpecName: "client-ca") pod "e6867809-73e3-4291-97d4-cb38b0aeae7b" (UID: "e6867809-73e3-4291-97d4-cb38b0aeae7b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.874018 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e6867809-73e3-4291-97d4-cb38b0aeae7b" (UID: "e6867809-73e3-4291-97d4-cb38b0aeae7b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.879744 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7" (OuterVolumeSpecName: "kube-api-access-6bbk7") pod "e6867809-73e3-4291-97d4-cb38b0aeae7b" (UID: "e6867809-73e3-4291-97d4-cb38b0aeae7b"). InnerVolumeSpecName "kube-api-access-6bbk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.886446 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e6867809-73e3-4291-97d4-cb38b0aeae7b" (UID: "e6867809-73e3-4291-97d4-cb38b0aeae7b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.910144 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973488 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca\") pod \"58d085ee-1389-48a6-b185-a036265014d2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973590 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9szvm\" (UniqueName: \"kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm\") pod \"58d085ee-1389-48a6-b185-a036265014d2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973631 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config\") pod \"58d085ee-1389-48a6-b185-a036265014d2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973665 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert\") pod \"58d085ee-1389-48a6-b185-a036265014d2\" (UID: \"58d085ee-1389-48a6-b185-a036265014d2\") " Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973933 4710 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973945 4710 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973953 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6867809-73e3-4291-97d4-cb38b0aeae7b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973963 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bbk7\" (UniqueName: \"kubernetes.io/projected/e6867809-73e3-4291-97d4-cb38b0aeae7b-kube-api-access-6bbk7\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.973971 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6867809-73e3-4291-97d4-cb38b0aeae7b-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.974335 4710 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca" (OuterVolumeSpecName: "client-ca") pod "58d085ee-1389-48a6-b185-a036265014d2" (UID: "58d085ee-1389-48a6-b185-a036265014d2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.974549 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config" (OuterVolumeSpecName: "config") pod "58d085ee-1389-48a6-b185-a036265014d2" (UID: "58d085ee-1389-48a6-b185-a036265014d2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.977291 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "58d085ee-1389-48a6-b185-a036265014d2" (UID: "58d085ee-1389-48a6-b185-a036265014d2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:17:05 crc kubenswrapper[4710]: I1009 09:17:05.977704 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm" (OuterVolumeSpecName: "kube-api-access-9szvm") pod "58d085ee-1389-48a6-b185-a036265014d2" (UID: "58d085ee-1389-48a6-b185-a036265014d2"). InnerVolumeSpecName "kube-api-access-9szvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.076199 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.076479 4710 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58d085ee-1389-48a6-b185-a036265014d2-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.076491 4710 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58d085ee-1389-48a6-b185-a036265014d2-client-ca\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.076503 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9szvm\" (UniqueName: \"kubernetes.io/projected/58d085ee-1389-48a6-b185-a036265014d2-kube-api-access-9szvm\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.573324 4710 generic.go:334] "Generic (PLEG): container finished" podID="e6867809-73e3-4291-97d4-cb38b0aeae7b" containerID="2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3" exitCode=0 Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.573376 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.573394 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" event={"ID":"e6867809-73e3-4291-97d4-cb38b0aeae7b","Type":"ContainerDied","Data":"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.573494 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-lgdvx" event={"ID":"e6867809-73e3-4291-97d4-cb38b0aeae7b","Type":"ContainerDied","Data":"276b9748dbafbbc6d94ab25ead6692a1e2f096b7760f554f3f7cd7eef2ec3f34"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.573525 4710 scope.go:117] "RemoveContainer" containerID="2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.576880 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d" exitCode=0 Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.577001 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.577040 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.578993 4710 generic.go:334] "Generic (PLEG): container finished" podID="58d085ee-1389-48a6-b185-a036265014d2" containerID="c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b" exitCode=0 Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.579059 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" event={"ID":"58d085ee-1389-48a6-b185-a036265014d2","Type":"ContainerDied","Data":"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.579079 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.579097 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2" event={"ID":"58d085ee-1389-48a6-b185-a036265014d2","Type":"ContainerDied","Data":"bd701f8b3e808ba28110ed4f1fa1436198312f7d6712a99fa5cd1cb2872f4151"} Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.593404 4710 scope.go:117] "RemoveContainer" containerID="2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.597068 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3\": container with ID starting with 2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3 not found: ID does not exist" containerID="2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.597165 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3"} err="failed to get container status \"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3\": rpc error: code = NotFound desc = could not find container \"2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3\": container with ID starting with 2e6c0e5c5c55f61ce0b7b5971559baf917db34a010bcd0042d84d944bb5644a3 not found: ID does not exist" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.597241 4710 scope.go:117] "RemoveContainer" containerID="67796ccaaae375d049d846cb98a7d56281b37ddbf6ec7f175f6fa0bd1c54ba36" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.607343 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.616345 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-lgdvx"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.632646 4710 scope.go:117] "RemoveContainer" containerID="c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.634400 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.636831 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-5gtj2"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.647788 4710 scope.go:117] "RemoveContainer" containerID="c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.648255 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b\": container with ID starting with c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b not found: ID does not exist" containerID="c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.648353 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b"} err="failed to get container status \"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b\": rpc error: code = NotFound desc = could not find container \"c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b\": container with ID starting with c445af8292f14e4f23f4d8167605efd9cd3f164168922228b3e780d4f1904a7b not found: ID does not exist" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.688628 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw"] Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.688927 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="extract" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.688943 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="extract" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.688959 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="util" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.688966 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="util" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.688972 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58d085ee-1389-48a6-b185-a036265014d2" containerName="route-controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.688978 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="58d085ee-1389-48a6-b185-a036265014d2" containerName="route-controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.688989 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6867809-73e3-4291-97d4-cb38b0aeae7b" containerName="controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.688997 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6867809-73e3-4291-97d4-cb38b0aeae7b" containerName="controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: E1009 09:17:06.689004 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="pull" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.689010 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="pull" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.689098 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfbeb551-9915-4071-a67c-5a88443100f1" containerName="extract" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.689107 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="58d085ee-1389-48a6-b185-a036265014d2" containerName="route-controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.689115 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6867809-73e3-4291-97d4-cb38b0aeae7b" containerName="controller-manager" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.689535 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.692035 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.692191 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.693344 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.693412 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.693622 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.693621 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.699500 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-644fdc9947-pc5mh"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.701652 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.706734 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-644fdc9947-pc5mh"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.707239 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.707379 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.707832 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.710807 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.710807 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.711183 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.716679 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.742562 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw"] Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788206 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-proxy-ca-bundles\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788259 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcp9r\" (UniqueName: \"kubernetes.io/projected/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-kube-api-access-gcp9r\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788307 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjpn9\" (UniqueName: \"kubernetes.io/projected/d02054de-25de-45a4-beb6-2d6bd03d8781-kube-api-access-qjpn9\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788363 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-client-ca\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788484 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-serving-cert\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788550 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-config\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788580 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d02054de-25de-45a4-beb6-2d6bd03d8781-serving-cert\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788612 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-config\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.788669 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-client-ca\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.822370 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58d085ee-1389-48a6-b185-a036265014d2" path="/var/lib/kubelet/pods/58d085ee-1389-48a6-b185-a036265014d2/volumes" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.823194 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6867809-73e3-4291-97d4-cb38b0aeae7b" path="/var/lib/kubelet/pods/e6867809-73e3-4291-97d4-cb38b0aeae7b/volumes" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889790 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-config\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889829 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d02054de-25de-45a4-beb6-2d6bd03d8781-serving-cert\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889858 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-config\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889882 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-client-ca\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889940 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-proxy-ca-bundles\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889961 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcp9r\" (UniqueName: \"kubernetes.io/projected/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-kube-api-access-gcp9r\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.889983 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjpn9\" (UniqueName: 
\"kubernetes.io/projected/d02054de-25de-45a4-beb6-2d6bd03d8781-kube-api-access-qjpn9\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.890003 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-client-ca\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.890046 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-serving-cert\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.891605 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-config\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.891701 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-proxy-ca-bundles\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.891941 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-client-ca\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.892550 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d02054de-25de-45a4-beb6-2d6bd03d8781-config\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.892862 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-client-ca\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.895143 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d02054de-25de-45a4-beb6-2d6bd03d8781-serving-cert\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " 
pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.896033 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-serving-cert\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.905352 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjpn9\" (UniqueName: \"kubernetes.io/projected/d02054de-25de-45a4-beb6-2d6bd03d8781-kube-api-access-qjpn9\") pod \"route-controller-manager-66f8c9f9d6-9hkrw\" (UID: \"d02054de-25de-45a4-beb6-2d6bd03d8781\") " pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:06 crc kubenswrapper[4710]: I1009 09:17:06.905860 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcp9r\" (UniqueName: \"kubernetes.io/projected/0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca-kube-api-access-gcp9r\") pod \"controller-manager-644fdc9947-pc5mh\" (UID: \"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca\") " pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.002269 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.020088 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.406664 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw"] Oct 09 09:17:07 crc kubenswrapper[4710]: W1009 09:17:07.412343 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd02054de_25de_45a4_beb6_2d6bd03d8781.slice/crio-57ad1d4d9e9e25952d8a9f4e98551eb00a60798ed49af36a4c773adc6d76d78e WatchSource:0}: Error finding container 57ad1d4d9e9e25952d8a9f4e98551eb00a60798ed49af36a4c773adc6d76d78e: Status 404 returned error can't find the container with id 57ad1d4d9e9e25952d8a9f4e98551eb00a60798ed49af36a4c773adc6d76d78e Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.440626 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-644fdc9947-pc5mh"] Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.584959 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" event={"ID":"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca","Type":"ContainerStarted","Data":"9dbf6273c7a9f564d500f0ce397d45ab19855679164c3750b9e86bc246991ba2"} Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.585260 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" event={"ID":"0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca","Type":"ContainerStarted","Data":"6051f70636eb0e9f16232a08d40ced15b01ea585de57dfe8111128ad5f6fe40c"} Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.585277 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.588561 4710 patch_prober.go:28] interesting pod/controller-manager-644fdc9947-pc5mh container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" start-of-body= Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.588601 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" podUID="0d12e9b1-c0a4-43fa-a62c-c2a5d6d456ca" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.596848 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" event={"ID":"d02054de-25de-45a4-beb6-2d6bd03d8781","Type":"ContainerStarted","Data":"e33ec2afa0bed2b4a1c1f46945cf084f5649811a008ed09f31a8a5829fee1135"} Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.596877 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" event={"ID":"d02054de-25de-45a4-beb6-2d6bd03d8781","Type":"ContainerStarted","Data":"57ad1d4d9e9e25952d8a9f4e98551eb00a60798ed49af36a4c773adc6d76d78e"} Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.597569 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.607368 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" podStartSLOduration=2.607356689 podStartE2EDuration="2.607356689s" podCreationTimestamp="2025-10-09 09:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:17:07.603080506 +0000 UTC m=+751.093188902" watchObservedRunningTime="2025-10-09 09:17:07.607356689 +0000 UTC m=+751.097465085" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.623158 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" podStartSLOduration=2.623145581 podStartE2EDuration="2.623145581s" podCreationTimestamp="2025-10-09 09:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:17:07.6206051 +0000 UTC m=+751.110713497" watchObservedRunningTime="2025-10-09 09:17:07.623145581 +0000 UTC m=+751.113253978" Oct 09 09:17:07 crc kubenswrapper[4710]: I1009 09:17:07.999360 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-66f8c9f9d6-9hkrw" Oct 09 09:17:08 crc kubenswrapper[4710]: I1009 09:17:08.610691 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-644fdc9947-pc5mh" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.530644 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49"] Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.531657 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.541532 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-kbp4d" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.585978 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49"] Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.638502 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lbl5\" (UniqueName: \"kubernetes.io/projected/f23bbd5e-3d87-4396-aad0-9455c284fbf8-kube-api-access-4lbl5\") pod \"openstack-operator-controller-operator-848c57cb5c-lrs49\" (UID: \"f23bbd5e-3d87-4396-aad0-9455c284fbf8\") " pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.739739 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lbl5\" (UniqueName: \"kubernetes.io/projected/f23bbd5e-3d87-4396-aad0-9455c284fbf8-kube-api-access-4lbl5\") pod \"openstack-operator-controller-operator-848c57cb5c-lrs49\" (UID: \"f23bbd5e-3d87-4396-aad0-9455c284fbf8\") " pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.768689 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lbl5\" (UniqueName: \"kubernetes.io/projected/f23bbd5e-3d87-4396-aad0-9455c284fbf8-kube-api-access-4lbl5\") pod \"openstack-operator-controller-operator-848c57cb5c-lrs49\" (UID: \"f23bbd5e-3d87-4396-aad0-9455c284fbf8\") " pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:10 crc kubenswrapper[4710]: I1009 09:17:10.846619 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:11 crc kubenswrapper[4710]: I1009 09:17:11.235710 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49"] Oct 09 09:17:11 crc kubenswrapper[4710]: W1009 09:17:11.247182 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf23bbd5e_3d87_4396_aad0_9455c284fbf8.slice/crio-bc027b981db0f137691d3e740b7ea33dbca0643921236b7272e81d9deaded693 WatchSource:0}: Error finding container bc027b981db0f137691d3e740b7ea33dbca0643921236b7272e81d9deaded693: Status 404 returned error can't find the container with id bc027b981db0f137691d3e740b7ea33dbca0643921236b7272e81d9deaded693 Oct 09 09:17:11 crc kubenswrapper[4710]: I1009 09:17:11.618533 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" event={"ID":"f23bbd5e-3d87-4396-aad0-9455c284fbf8","Type":"ContainerStarted","Data":"bc027b981db0f137691d3e740b7ea33dbca0643921236b7272e81d9deaded693"} Oct 09 09:17:12 crc kubenswrapper[4710]: I1009 09:17:12.888208 4710 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 09 09:17:16 crc kubenswrapper[4710]: I1009 09:17:16.669023 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" event={"ID":"f23bbd5e-3d87-4396-aad0-9455c284fbf8","Type":"ContainerStarted","Data":"3c7501536e43e353babc277c326eacf5bfa1d79edebf018c9a7b70c402d5e083"} Oct 09 09:17:19 crc kubenswrapper[4710]: I1009 09:17:19.696845 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" event={"ID":"f23bbd5e-3d87-4396-aad0-9455c284fbf8","Type":"ContainerStarted","Data":"6635320a6b5ccffe4427a3eebf9ee831d37183422680d55a89bfd37289989fd3"} Oct 09 09:17:19 crc kubenswrapper[4710]: I1009 09:17:19.697234 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:19 crc kubenswrapper[4710]: I1009 09:17:19.734130 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" podStartSLOduration=2.363506299 podStartE2EDuration="9.734111705s" podCreationTimestamp="2025-10-09 09:17:10 +0000 UTC" firstStartedPulling="2025-10-09 09:17:11.249479033 +0000 UTC m=+754.739587431" lastFinishedPulling="2025-10-09 09:17:18.620084439 +0000 UTC m=+762.110192837" observedRunningTime="2025-10-09 09:17:19.732984399 +0000 UTC m=+763.223092796" watchObservedRunningTime="2025-10-09 09:17:19.734111705 +0000 UTC m=+763.224220102" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.485788 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.488045 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.508525 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.600134 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rllzf\" (UniqueName: \"kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.600229 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.600503 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.701788 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.702035 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rllzf\" (UniqueName: \"kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.702149 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.702387 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.702606 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.724753 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rllzf\" (UniqueName: \"kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf\") pod \"redhat-marketplace-xrpxx\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:25 crc kubenswrapper[4710]: I1009 09:17:25.807077 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:26 crc kubenswrapper[4710]: I1009 09:17:26.184095 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:26 crc kubenswrapper[4710]: I1009 09:17:26.738244 4710 generic.go:334] "Generic (PLEG): container finished" podID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerID="6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776" exitCode=0 Oct 09 09:17:26 crc kubenswrapper[4710]: I1009 09:17:26.738308 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerDied","Data":"6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776"} Oct 09 09:17:26 crc kubenswrapper[4710]: I1009 09:17:26.738616 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerStarted","Data":"88741273ce94a46cd06184223f320d1b490b4d92e01f37cdea579c169b029eea"} Oct 09 09:17:27 crc kubenswrapper[4710]: I1009 09:17:27.746151 4710 generic.go:334] "Generic (PLEG): container finished" podID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerID="0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90" exitCode=0 Oct 09 09:17:27 crc kubenswrapper[4710]: I1009 09:17:27.746216 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerDied","Data":"0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90"} Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.255750 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.258482 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.277053 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.336096 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.336191 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2rll\" (UniqueName: \"kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.336230 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.437305 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2rll\" (UniqueName: \"kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.437368 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.437411 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.437975 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.438471 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.471151 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-l2rll\" (UniqueName: \"kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll\") pod \"community-operators-q4wrj\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.572147 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.755675 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerStarted","Data":"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb"} Oct 09 09:17:28 crc kubenswrapper[4710]: I1009 09:17:28.776067 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xrpxx" podStartSLOduration=2.078335062 podStartE2EDuration="3.776048335s" podCreationTimestamp="2025-10-09 09:17:25 +0000 UTC" firstStartedPulling="2025-10-09 09:17:26.743571776 +0000 UTC m=+770.233680183" lastFinishedPulling="2025-10-09 09:17:28.441285059 +0000 UTC m=+771.931393456" observedRunningTime="2025-10-09 09:17:28.773413466 +0000 UTC m=+772.263521863" watchObservedRunningTime="2025-10-09 09:17:28.776048335 +0000 UTC m=+772.266156731" Oct 09 09:17:29 crc kubenswrapper[4710]: I1009 09:17:29.037581 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:29 crc kubenswrapper[4710]: I1009 09:17:29.764512 4710 generic.go:334] "Generic (PLEG): container finished" podID="81778f03-2f37-4674-bae2-c056f5b55124" containerID="a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3" exitCode=0 Oct 09 09:17:29 crc kubenswrapper[4710]: I1009 09:17:29.764560 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerDied","Data":"a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3"} Oct 09 09:17:29 crc kubenswrapper[4710]: I1009 09:17:29.764600 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerStarted","Data":"6b2fcffd6636515a6ef585a97097b76e058d29a9e7b0d3bd3750205e416074fb"} Oct 09 09:17:30 crc kubenswrapper[4710]: I1009 09:17:30.772701 4710 generic.go:334] "Generic (PLEG): container finished" podID="81778f03-2f37-4674-bae2-c056f5b55124" containerID="778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322" exitCode=0 Oct 09 09:17:30 crc kubenswrapper[4710]: I1009 09:17:30.772753 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerDied","Data":"778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322"} Oct 09 09:17:30 crc kubenswrapper[4710]: I1009 09:17:30.849946 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-848c57cb5c-lrs49" Oct 09 09:17:31 crc kubenswrapper[4710]: I1009 09:17:31.778983 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" 
event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerStarted","Data":"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b"} Oct 09 09:17:31 crc kubenswrapper[4710]: I1009 09:17:31.804486 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q4wrj" podStartSLOduration=2.289442605 podStartE2EDuration="3.80446162s" podCreationTimestamp="2025-10-09 09:17:28 +0000 UTC" firstStartedPulling="2025-10-09 09:17:29.76656419 +0000 UTC m=+773.256672587" lastFinishedPulling="2025-10-09 09:17:31.281583204 +0000 UTC m=+774.771691602" observedRunningTime="2025-10-09 09:17:31.801166486 +0000 UTC m=+775.291274883" watchObservedRunningTime="2025-10-09 09:17:31.80446162 +0000 UTC m=+775.294570016" Oct 09 09:17:35 crc kubenswrapper[4710]: I1009 09:17:35.807656 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:35 crc kubenswrapper[4710]: I1009 09:17:35.807970 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:35 crc kubenswrapper[4710]: I1009 09:17:35.871892 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:36 crc kubenswrapper[4710]: I1009 09:17:36.847620 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:36 crc kubenswrapper[4710]: I1009 09:17:36.884465 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:38 crc kubenswrapper[4710]: I1009 09:17:38.572872 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:38 crc kubenswrapper[4710]: I1009 09:17:38.573309 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:38 crc kubenswrapper[4710]: I1009 09:17:38.612129 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:38 crc kubenswrapper[4710]: I1009 09:17:38.818038 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xrpxx" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="registry-server" containerID="cri-o://a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb" gracePeriod=2 Oct 09 09:17:38 crc kubenswrapper[4710]: I1009 09:17:38.854565 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.254842 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.281255 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content\") pod \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.281409 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities\") pod \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.281459 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rllzf\" (UniqueName: \"kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf\") pod \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\" (UID: \"3a5f352f-f75b-4ff6-854d-ee26c5a48017\") " Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.283877 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities" (OuterVolumeSpecName: "utilities") pod "3a5f352f-f75b-4ff6-854d-ee26c5a48017" (UID: "3a5f352f-f75b-4ff6-854d-ee26c5a48017"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.297069 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf" (OuterVolumeSpecName: "kube-api-access-rllzf") pod "3a5f352f-f75b-4ff6-854d-ee26c5a48017" (UID: "3a5f352f-f75b-4ff6-854d-ee26c5a48017"). InnerVolumeSpecName "kube-api-access-rllzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.300191 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a5f352f-f75b-4ff6-854d-ee26c5a48017" (UID: "3a5f352f-f75b-4ff6-854d-ee26c5a48017"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.383834 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.383866 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a5f352f-f75b-4ff6-854d-ee26c5a48017-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.383878 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rllzf\" (UniqueName: \"kubernetes.io/projected/3a5f352f-f75b-4ff6-854d-ee26c5a48017-kube-api-access-rllzf\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.721526 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.824877 4710 generic.go:334] "Generic (PLEG): container finished" podID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerID="a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb" exitCode=0 Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.824978 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xrpxx" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.824927 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerDied","Data":"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb"} Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.825040 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xrpxx" event={"ID":"3a5f352f-f75b-4ff6-854d-ee26c5a48017","Type":"ContainerDied","Data":"88741273ce94a46cd06184223f320d1b490b4d92e01f37cdea579c169b029eea"} Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.825057 4710 scope.go:117] "RemoveContainer" containerID="a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.842196 4710 scope.go:117] "RemoveContainer" containerID="0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.856972 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.860087 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xrpxx"] Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.863957 4710 scope.go:117] "RemoveContainer" containerID="6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.878545 4710 scope.go:117] "RemoveContainer" containerID="a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb" Oct 09 09:17:39 crc kubenswrapper[4710]: E1009 09:17:39.878883 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb\": container with ID starting with a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb not 
found: ID does not exist" containerID="a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.878912 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb"} err="failed to get container status \"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb\": rpc error: code = NotFound desc = could not find container \"a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb\": container with ID starting with a653b5bea0f714764f5e9625d87c155fac783e4775ecb9fb09c90507e04eb7cb not found: ID does not exist" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.878930 4710 scope.go:117] "RemoveContainer" containerID="0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90" Oct 09 09:17:39 crc kubenswrapper[4710]: E1009 09:17:39.879867 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90\": container with ID starting with 0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90 not found: ID does not exist" containerID="0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.879888 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90"} err="failed to get container status \"0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90\": rpc error: code = NotFound desc = could not find container \"0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90\": container with ID starting with 0f8a659615cbeb4948f02f414d7eec0648114f1bae985fe682216bdcce2ded90 not found: ID does not exist" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.879901 4710 scope.go:117] "RemoveContainer" containerID="6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776" Oct 09 09:17:39 crc kubenswrapper[4710]: E1009 09:17:39.880188 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776\": container with ID starting with 6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776 not found: ID does not exist" containerID="6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776" Oct 09 09:17:39 crc kubenswrapper[4710]: I1009 09:17:39.880227 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776"} err="failed to get container status \"6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776\": rpc error: code = NotFound desc = could not find container \"6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776\": container with ID starting with 6caa330f72fbe473756e4adb083ec2dc609bb3674c34dd89d261a53960bf4776 not found: ID does not exist" Oct 09 09:17:40 crc kubenswrapper[4710]: I1009 09:17:40.822822 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" path="/var/lib/kubelet/pods/3a5f352f-f75b-4ff6-854d-ee26c5a48017/volumes" Oct 09 09:17:40 crc kubenswrapper[4710]: I1009 09:17:40.832203 4710 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/community-operators-q4wrj" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="registry-server" containerID="cri-o://312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b" gracePeriod=2 Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.418306 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.510936 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities\") pod \"81778f03-2f37-4674-bae2-c056f5b55124\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.511012 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2rll\" (UniqueName: \"kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll\") pod \"81778f03-2f37-4674-bae2-c056f5b55124\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.511048 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content\") pod \"81778f03-2f37-4674-bae2-c056f5b55124\" (UID: \"81778f03-2f37-4674-bae2-c056f5b55124\") " Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.514598 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities" (OuterVolumeSpecName: "utilities") pod "81778f03-2f37-4674-bae2-c056f5b55124" (UID: "81778f03-2f37-4674-bae2-c056f5b55124"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.528732 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll" (OuterVolumeSpecName: "kube-api-access-l2rll") pod "81778f03-2f37-4674-bae2-c056f5b55124" (UID: "81778f03-2f37-4674-bae2-c056f5b55124"). InnerVolumeSpecName "kube-api-access-l2rll". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.551184 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81778f03-2f37-4674-bae2-c056f5b55124" (UID: "81778f03-2f37-4674-bae2-c056f5b55124"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.612103 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.612126 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2rll\" (UniqueName: \"kubernetes.io/projected/81778f03-2f37-4674-bae2-c056f5b55124-kube-api-access-l2rll\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.612135 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81778f03-2f37-4674-bae2-c056f5b55124-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.838482 4710 generic.go:334] "Generic (PLEG): container finished" podID="81778f03-2f37-4674-bae2-c056f5b55124" containerID="312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b" exitCode=0 Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.838520 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerDied","Data":"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b"} Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.838541 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q4wrj" event={"ID":"81778f03-2f37-4674-bae2-c056f5b55124","Type":"ContainerDied","Data":"6b2fcffd6636515a6ef585a97097b76e058d29a9e7b0d3bd3750205e416074fb"} Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.838559 4710 scope.go:117] "RemoveContainer" containerID="312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.839358 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q4wrj" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.856066 4710 scope.go:117] "RemoveContainer" containerID="778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.868019 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.870979 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q4wrj"] Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.882516 4710 scope.go:117] "RemoveContainer" containerID="a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.894157 4710 scope.go:117] "RemoveContainer" containerID="312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b" Oct 09 09:17:41 crc kubenswrapper[4710]: E1009 09:17:41.894470 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b\": container with ID starting with 312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b not found: ID does not exist" containerID="312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.894496 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b"} err="failed to get container status \"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b\": rpc error: code = NotFound desc = could not find container \"312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b\": container with ID starting with 312e9877f86b7f629459a0b26d5d8ab26ec0c5a66de1ac03100c5c874607982b not found: ID does not exist" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.894516 4710 scope.go:117] "RemoveContainer" containerID="778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322" Oct 09 09:17:41 crc kubenswrapper[4710]: E1009 09:17:41.894797 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322\": container with ID starting with 778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322 not found: ID does not exist" containerID="778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.894828 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322"} err="failed to get container status \"778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322\": rpc error: code = NotFound desc = could not find container \"778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322\": container with ID starting with 778b6ebeb2907c54e783382d6bd8bc32f2469730bb3086f194bb8ce1e29f3322 not found: ID does not exist" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.894852 4710 scope.go:117] "RemoveContainer" containerID="a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3" Oct 09 09:17:41 crc kubenswrapper[4710]: E1009 09:17:41.895108 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3\": container with ID starting with a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3 not found: ID does not exist" containerID="a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3" Oct 09 09:17:41 crc kubenswrapper[4710]: I1009 09:17:41.895135 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3"} err="failed to get container status \"a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3\": rpc error: code = NotFound desc = could not find container \"a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3\": container with ID starting with a3b6c2928e3ed34659bc058a8be72f7dd8b9138972135f241f6e50b73a40b4f3 not found: ID does not exist" Oct 09 09:17:42 crc kubenswrapper[4710]: I1009 09:17:42.835558 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81778f03-2f37-4674-bae2-c056f5b55124" path="/var/lib/kubelet/pods/81778f03-2f37-4674-bae2-c056f5b55124/volumes" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.221275 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw"] Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222150 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="extract-content" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222164 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="extract-content" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222172 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222178 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222189 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="extract-utilities" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222196 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="extract-utilities" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222206 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="extract-utilities" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222213 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="extract-utilities" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222220 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="extract-content" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222225 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="extract-content" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.222233 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" 
containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222239 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222345 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="81778f03-2f37-4674-bae2-c056f5b55124" containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222360 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a5f352f-f75b-4ff6-854d-ee26c5a48017" containerName="registry-server" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.222928 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.225473 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-rcxfh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.231578 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.232484 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.234086 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-2cc7f" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.252409 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.279420 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.283269 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.284153 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.289779 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-kp4hp" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.294077 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjrc7\" (UniqueName: \"kubernetes.io/projected/8437ff8a-3892-464b-963b-d5afaf9599dc-kube-api-access-tjrc7\") pod \"barbican-operator-controller-manager-658bdf4b74-mvjkw\" (UID: \"8437ff8a-3892-464b-963b-d5afaf9599dc\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.294233 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8rsr\" (UniqueName: \"kubernetes.io/projected/26ad6e31-7002-4043-a971-aa507f4118bf-kube-api-access-v8rsr\") pod \"cinder-operator-controller-manager-7b7fb68549-7t5cw\" (UID: \"26ad6e31-7002-4043-a971-aa507f4118bf\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.324448 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.325842 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.328445 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-msl9k" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.344895 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.366263 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.389139 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.396468 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.398191 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrl9c\" (UniqueName: \"kubernetes.io/projected/5fc82f31-455f-4960-8538-5315e1a3a09a-kube-api-access-wrl9c\") pod \"designate-operator-controller-manager-85d5d9dd78-qq9g6\" (UID: \"5fc82f31-455f-4960-8538-5315e1a3a09a\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.398249 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjrc7\" (UniqueName: \"kubernetes.io/projected/8437ff8a-3892-464b-963b-d5afaf9599dc-kube-api-access-tjrc7\") pod \"barbican-operator-controller-manager-658bdf4b74-mvjkw\" (UID: \"8437ff8a-3892-464b-963b-d5afaf9599dc\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.398352 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4b4k\" (UniqueName: \"kubernetes.io/projected/10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6-kube-api-access-g4b4k\") pod \"glance-operator-controller-manager-84b9b84486-4szsc\" (UID: \"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.398375 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8rsr\" (UniqueName: \"kubernetes.io/projected/26ad6e31-7002-4043-a971-aa507f4118bf-kube-api-access-v8rsr\") pod \"cinder-operator-controller-manager-7b7fb68549-7t5cw\" (UID: \"26ad6e31-7002-4043-a971-aa507f4118bf\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.401208 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-ntsdc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.413661 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.415882 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.416854 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.419999 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.421111 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.421514 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-jrsns" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.421662 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.425119 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-pvwpp" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.425508 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.426242 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.429792 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-zn7cg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.430489 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.434504 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjrc7\" (UniqueName: \"kubernetes.io/projected/8437ff8a-3892-464b-963b-d5afaf9599dc-kube-api-access-tjrc7\") pod \"barbican-operator-controller-manager-658bdf4b74-mvjkw\" (UID: \"8437ff8a-3892-464b-963b-d5afaf9599dc\") " pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.451957 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8rsr\" (UniqueName: \"kubernetes.io/projected/26ad6e31-7002-4043-a971-aa507f4118bf-kube-api-access-v8rsr\") pod \"cinder-operator-controller-manager-7b7fb68549-7t5cw\" (UID: \"26ad6e31-7002-4043-a971-aa507f4118bf\") " pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.456153 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.463524 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.485563 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500008 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500068 4710 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86wc6\" (UniqueName: \"kubernetes.io/projected/1438d002-6055-453b-8a7a-c83888b37429-kube-api-access-86wc6\") pod \"heat-operator-controller-manager-858f76bbdd-qhvrk\" (UID: \"1438d002-6055-453b-8a7a-c83888b37429\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500092 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bcw2\" (UniqueName: \"kubernetes.io/projected/3b555e01-0210-431d-83ab-97ebcc53a68b-kube-api-access-9bcw2\") pod \"horizon-operator-controller-manager-7ffbcb7588-svtkg\" (UID: \"3b555e01-0210-431d-83ab-97ebcc53a68b\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500119 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4b4k\" (UniqueName: \"kubernetes.io/projected/10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6-kube-api-access-g4b4k\") pod \"glance-operator-controller-manager-84b9b84486-4szsc\" (UID: \"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500140 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpz9s\" (UniqueName: \"kubernetes.io/projected/21d3fd5d-1f17-45d0-bf73-59fdc7211820-kube-api-access-rpz9s\") pod \"ironic-operator-controller-manager-9c5c78d49-dhvdq\" (UID: \"21d3fd5d-1f17-45d0-bf73-59fdc7211820\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500159 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mgnv\" (UniqueName: \"kubernetes.io/projected/84815f80-0c57-4246-abe3-7c54bd77d1c1-kube-api-access-8mgnv\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.500187 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrl9c\" (UniqueName: \"kubernetes.io/projected/5fc82f31-455f-4960-8538-5315e1a3a09a-kube-api-access-wrl9c\") pod \"designate-operator-controller-manager-85d5d9dd78-qq9g6\" (UID: \"5fc82f31-455f-4960-8538-5315e1a3a09a\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.501467 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.504839 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-wdm5b" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.509416 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.510211 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.515041 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vnjzn" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.534530 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4b4k\" (UniqueName: \"kubernetes.io/projected/10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6-kube-api-access-g4b4k\") pod \"glance-operator-controller-manager-84b9b84486-4szsc\" (UID: \"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6\") " pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.538263 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.541144 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrl9c\" (UniqueName: \"kubernetes.io/projected/5fc82f31-455f-4960-8538-5315e1a3a09a-kube-api-access-wrl9c\") pod \"designate-operator-controller-manager-85d5d9dd78-qq9g6\" (UID: \"5fc82f31-455f-4960-8538-5315e1a3a09a\") " pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.541340 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.548155 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.556451 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.558398 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.559454 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.562574 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-fr5qj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.576025 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.590536 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.591512 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.592929 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-8522n" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.596712 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.598091 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.598243 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602652 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpz9s\" (UniqueName: \"kubernetes.io/projected/21d3fd5d-1f17-45d0-bf73-59fdc7211820-kube-api-access-rpz9s\") pod \"ironic-operator-controller-manager-9c5c78d49-dhvdq\" (UID: \"21d3fd5d-1f17-45d0-bf73-59fdc7211820\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602687 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mgnv\" (UniqueName: \"kubernetes.io/projected/84815f80-0c57-4246-abe3-7c54bd77d1c1-kube-api-access-8mgnv\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602736 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602779 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd8nj\" (UniqueName: \"kubernetes.io/projected/fa97dde8-95a7-4c4b-820d-d889545d79d5-kube-api-access-rd8nj\") pod \"manila-operator-controller-manager-5f67fbc655-2htjv\" (UID: \"fa97dde8-95a7-4c4b-820d-d889545d79d5\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602805 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86wc6\" (UniqueName: \"kubernetes.io/projected/1438d002-6055-453b-8a7a-c83888b37429-kube-api-access-86wc6\") pod \"heat-operator-controller-manager-858f76bbdd-qhvrk\" (UID: \"1438d002-6055-453b-8a7a-c83888b37429\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602824 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bcw2\" (UniqueName: \"kubernetes.io/projected/3b555e01-0210-431d-83ab-97ebcc53a68b-kube-api-access-9bcw2\") pod \"horizon-operator-controller-manager-7ffbcb7588-svtkg\" (UID: 
\"3b555e01-0210-431d-83ab-97ebcc53a68b\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602843 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk8v2\" (UniqueName: \"kubernetes.io/projected/72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a-kube-api-access-mk8v2\") pod \"mariadb-operator-controller-manager-f9fb45f8f-6lbkw\" (UID: \"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.602864 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5b98k\" (UniqueName: \"kubernetes.io/projected/f78f287b-b34d-40c0-ad99-caaf90bc2ae7-kube-api-access-5b98k\") pod \"keystone-operator-controller-manager-55b6b7c7b8-jb2h2\" (UID: \"f78f287b-b34d-40c0-ad99-caaf90bc2ae7\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.603166 4710 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 09 09:17:46 crc kubenswrapper[4710]: E1009 09:17:46.603326 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert podName:84815f80-0c57-4246-abe3-7c54bd77d1c1 nodeName:}" failed. No retries permitted until 2025-10-09 09:17:47.10321031 +0000 UTC m=+790.593318707 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert") pod "infra-operator-controller-manager-656bcbd775-m95bh" (UID: "84815f80-0c57-4246-abe3-7c54bd77d1c1") : secret "infra-operator-webhook-server-cert" not found Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.617577 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-x5br5" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.643687 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.644577 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.648638 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpz9s\" (UniqueName: \"kubernetes.io/projected/21d3fd5d-1f17-45d0-bf73-59fdc7211820-kube-api-access-rpz9s\") pod \"ironic-operator-controller-manager-9c5c78d49-dhvdq\" (UID: \"21d3fd5d-1f17-45d0-bf73-59fdc7211820\") " pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.648929 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-xwb5x" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.649366 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.654515 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.654685 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86wc6\" (UniqueName: \"kubernetes.io/projected/1438d002-6055-453b-8a7a-c83888b37429-kube-api-access-86wc6\") pod \"heat-operator-controller-manager-858f76bbdd-qhvrk\" (UID: \"1438d002-6055-453b-8a7a-c83888b37429\") " pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.655530 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mgnv\" (UniqueName: \"kubernetes.io/projected/84815f80-0c57-4246-abe3-7c54bd77d1c1-kube-api-access-8mgnv\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.657874 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bcw2\" (UniqueName: \"kubernetes.io/projected/3b555e01-0210-431d-83ab-97ebcc53a68b-kube-api-access-9bcw2\") pod \"horizon-operator-controller-manager-7ffbcb7588-svtkg\" (UID: \"3b555e01-0210-431d-83ab-97ebcc53a68b\") " pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.668634 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.685861 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704128 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92lks\" (UniqueName: \"kubernetes.io/projected/addc94b4-bdbb-4a05-993d-5a7ac2bb3e19-kube-api-access-92lks\") pod \"octavia-operator-controller-manager-69fdcfc5f5-nmmkl\" (UID: \"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704273 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrvtq\" (UniqueName: \"kubernetes.io/projected/51113172-27cd-47a3-8bc2-b751cb1654f7-kube-api-access-jrvtq\") pod \"nova-operator-controller-manager-5df598886f-sd5xk\" (UID: \"51113172-27cd-47a3-8bc2-b751cb1654f7\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704383 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd8nj\" (UniqueName: \"kubernetes.io/projected/fa97dde8-95a7-4c4b-820d-d889545d79d5-kube-api-access-rd8nj\") pod \"manila-operator-controller-manager-5f67fbc655-2htjv\" (UID: \"fa97dde8-95a7-4c4b-820d-d889545d79d5\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704487 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mk8v2\" (UniqueName: \"kubernetes.io/projected/72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a-kube-api-access-mk8v2\") pod \"mariadb-operator-controller-manager-f9fb45f8f-6lbkw\" (UID: \"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704556 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5b98k\" (UniqueName: \"kubernetes.io/projected/f78f287b-b34d-40c0-ad99-caaf90bc2ae7-kube-api-access-5b98k\") pod \"keystone-operator-controller-manager-55b6b7c7b8-jb2h2\" (UID: \"f78f287b-b34d-40c0-ad99-caaf90bc2ae7\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.704658 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd75k\" (UniqueName: \"kubernetes.io/projected/d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf-kube-api-access-rd75k\") pod \"neutron-operator-controller-manager-79d585cb66-d7sqz\" (UID: \"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.708293 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.715077 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.720089 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.724570 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-jckmz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.724741 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.742872 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd8nj\" (UniqueName: \"kubernetes.io/projected/fa97dde8-95a7-4c4b-820d-d889545d79d5-kube-api-access-rd8nj\") pod \"manila-operator-controller-manager-5f67fbc655-2htjv\" (UID: \"fa97dde8-95a7-4c4b-820d-d889545d79d5\") " pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.751656 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.752658 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.756250 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk8v2\" (UniqueName: \"kubernetes.io/projected/72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a-kube-api-access-mk8v2\") pod \"mariadb-operator-controller-manager-f9fb45f8f-6lbkw\" (UID: \"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a\") " pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.762901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5b98k\" (UniqueName: \"kubernetes.io/projected/f78f287b-b34d-40c0-ad99-caaf90bc2ae7-kube-api-access-5b98k\") pod \"keystone-operator-controller-manager-55b6b7c7b8-jb2h2\" (UID: \"f78f287b-b34d-40c0-ad99-caaf90bc2ae7\") " pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.764804 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-8v7w6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.773920 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.784880 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.785907 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.791669 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-jf8ql" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.794803 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.815866 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.817234 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.817768 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.831151 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.835302 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92lks\" (UniqueName: \"kubernetes.io/projected/addc94b4-bdbb-4a05-993d-5a7ac2bb3e19-kube-api-access-92lks\") pod \"octavia-operator-controller-manager-69fdcfc5f5-nmmkl\" (UID: \"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.835447 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrvtq\" (UniqueName: \"kubernetes.io/projected/51113172-27cd-47a3-8bc2-b751cb1654f7-kube-api-access-jrvtq\") pod \"nova-operator-controller-manager-5df598886f-sd5xk\" (UID: \"51113172-27cd-47a3-8bc2-b751cb1654f7\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.835507 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd75k\" (UniqueName: \"kubernetes.io/projected/d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf-kube-api-access-rd75k\") pod \"neutron-operator-controller-manager-79d585cb66-d7sqz\" (UID: \"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.846646 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.861743 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-5s6vc" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.878669 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.926812 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd75k\" (UniqueName: \"kubernetes.io/projected/d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf-kube-api-access-rd75k\") pod \"neutron-operator-controller-manager-79d585cb66-d7sqz\" (UID: \"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf\") " pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.940796 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.943022 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2cpk\" (UniqueName: \"kubernetes.io/projected/2e6376a2-edb7-4958-b3b3-3a6773782349-kube-api-access-s2cpk\") pod \"placement-operator-controller-manager-68b6c87b68-lghdj\" (UID: \"2e6376a2-edb7-4958-b3b3-3a6773782349\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.943095 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2sdx\" (UniqueName: \"kubernetes.io/projected/9f1f2915-bd9f-496f-a513-e1fd022ee463-kube-api-access-f2sdx\") pod \"swift-operator-controller-manager-db6d7f97b-rh8n6\" (UID: \"9f1f2915-bd9f-496f-a513-e1fd022ee463\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.943161 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.943202 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z45tr\" (UniqueName: \"kubernetes.io/projected/4a08f5f1-bab4-425c-b81c-b48f2d4a186b-kube-api-access-z45tr\") pod \"ovn-operator-controller-manager-79db49b9fb-vnvl6\" (UID: \"4a08f5f1-bab4-425c-b81c-b48f2d4a186b\") " pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.943227 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrfrf\" (UniqueName: \"kubernetes.io/projected/cce3973b-b375-4ea1-b907-0f46e330dfae-kube-api-access-xrfrf\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.948973 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.949708 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92lks\" (UniqueName: \"kubernetes.io/projected/addc94b4-bdbb-4a05-993d-5a7ac2bb3e19-kube-api-access-92lks\") pod \"octavia-operator-controller-manager-69fdcfc5f5-nmmkl\" (UID: \"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19\") " pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.970992 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrvtq\" (UniqueName: \"kubernetes.io/projected/51113172-27cd-47a3-8bc2-b751cb1654f7-kube-api-access-jrvtq\") pod \"nova-operator-controller-manager-5df598886f-sd5xk\" (UID: \"51113172-27cd-47a3-8bc2-b751cb1654f7\") " pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.972771 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.973904 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.974719 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.974743 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.974756 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.974832 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.975232 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.975388 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.976643 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.978586 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-qwmj2" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.978931 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-cm72v" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.983366 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.988650 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl"] Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.988749 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:17:46 crc kubenswrapper[4710]: I1009 09:17:46.994051 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-76vs4" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.016827 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.056857 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5cgr\" (UniqueName: \"kubernetes.io/projected/2c55fbcc-5995-4a59-b8ae-dc8be7411fa8-kube-api-access-f5cgr\") pod \"telemetry-operator-controller-manager-67cfc6749b-t69gh\" (UID: \"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.056908 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2cpk\" (UniqueName: \"kubernetes.io/projected/2e6376a2-edb7-4958-b3b3-3a6773782349-kube-api-access-s2cpk\") pod \"placement-operator-controller-manager-68b6c87b68-lghdj\" (UID: \"2e6376a2-edb7-4958-b3b3-3a6773782349\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.056935 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb5ms\" (UniqueName: \"kubernetes.io/projected/b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3-kube-api-access-fb5ms\") pod \"test-operator-controller-manager-5458f77c4-fzwm9\" (UID: \"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.056975 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2sdx\" (UniqueName: \"kubernetes.io/projected/9f1f2915-bd9f-496f-a513-e1fd022ee463-kube-api-access-f2sdx\") pod \"swift-operator-controller-manager-db6d7f97b-rh8n6\" (UID: \"9f1f2915-bd9f-496f-a513-e1fd022ee463\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.057022 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.057057 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z45tr\" (UniqueName: \"kubernetes.io/projected/4a08f5f1-bab4-425c-b81c-b48f2d4a186b-kube-api-access-z45tr\") pod \"ovn-operator-controller-manager-79db49b9fb-vnvl6\" (UID: \"4a08f5f1-bab4-425c-b81c-b48f2d4a186b\") " pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.057083 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrfrf\" (UniqueName: \"kubernetes.io/projected/cce3973b-b375-4ea1-b907-0f46e330dfae-kube-api-access-xrfrf\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.057140 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qc8s\" (UniqueName: \"kubernetes.io/projected/94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e-kube-api-access-8qc8s\") pod \"watcher-operator-controller-manager-7f554bff7b-759pl\" (UID: \"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.057593 4710 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.057632 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert podName:cce3973b-b375-4ea1-b907-0f46e330dfae nodeName:}" failed. No retries permitted until 2025-10-09 09:17:47.557618114 +0000 UTC m=+791.047726511 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert") pod "openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" (UID: "cce3973b-b375-4ea1-b907-0f46e330dfae") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.103268 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrfrf\" (UniqueName: \"kubernetes.io/projected/cce3973b-b375-4ea1-b907-0f46e330dfae-kube-api-access-xrfrf\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.103904 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z45tr\" (UniqueName: \"kubernetes.io/projected/4a08f5f1-bab4-425c-b81c-b48f2d4a186b-kube-api-access-z45tr\") pod \"ovn-operator-controller-manager-79db49b9fb-vnvl6\" (UID: \"4a08f5f1-bab4-425c-b81c-b48f2d4a186b\") " pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.113895 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2cpk\" (UniqueName: \"kubernetes.io/projected/2e6376a2-edb7-4958-b3b3-3a6773782349-kube-api-access-s2cpk\") pod \"placement-operator-controller-manager-68b6c87b68-lghdj\" (UID: \"2e6376a2-edb7-4958-b3b3-3a6773782349\") " pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.151329 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2sdx\" (UniqueName: \"kubernetes.io/projected/9f1f2915-bd9f-496f-a513-e1fd022ee463-kube-api-access-f2sdx\") pod \"swift-operator-controller-manager-db6d7f97b-rh8n6\" (UID: \"9f1f2915-bd9f-496f-a513-e1fd022ee463\") " pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.159834 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.161679 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.161739 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qc8s\" (UniqueName: \"kubernetes.io/projected/94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e-kube-api-access-8qc8s\") pod \"watcher-operator-controller-manager-7f554bff7b-759pl\" (UID: \"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.161782 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5cgr\" (UniqueName: \"kubernetes.io/projected/2c55fbcc-5995-4a59-b8ae-dc8be7411fa8-kube-api-access-f5cgr\") pod \"telemetry-operator-controller-manager-67cfc6749b-t69gh\" (UID: \"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.161812 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb5ms\" (UniqueName: \"kubernetes.io/projected/b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3-kube-api-access-fb5ms\") pod \"test-operator-controller-manager-5458f77c4-fzwm9\" (UID: \"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.182030 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/84815f80-0c57-4246-abe3-7c54bd77d1c1-cert\") pod \"infra-operator-controller-manager-656bcbd775-m95bh\" (UID: \"84815f80-0c57-4246-abe3-7c54bd77d1c1\") " pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.206087 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qc8s\" (UniqueName: \"kubernetes.io/projected/94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e-kube-api-access-8qc8s\") pod \"watcher-operator-controller-manager-7f554bff7b-759pl\" (UID: \"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e\") " pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.206959 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb5ms\" (UniqueName: \"kubernetes.io/projected/b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3-kube-api-access-fb5ms\") pod \"test-operator-controller-manager-5458f77c4-fzwm9\" (UID: \"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3\") " pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.207746 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5cgr\" (UniqueName: \"kubernetes.io/projected/2c55fbcc-5995-4a59-b8ae-dc8be7411fa8-kube-api-access-f5cgr\") pod \"telemetry-operator-controller-manager-67cfc6749b-t69gh\" (UID: 
\"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8\") " pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.244632 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.275401 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.337293 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.375796 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.375908 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.384372 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.390210 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.409787 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.409984 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-2957n" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.455864 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.475372 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69gfj\" (UniqueName: \"kubernetes.io/projected/eda25b03-4fb3-4ace-803c-1d1800196995-kube-api-access-69gfj\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.475500 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.479487 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.517789 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.518613 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.522023 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-lc2kk" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.540361 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.576840 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.577153 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slztw\" (UniqueName: \"kubernetes.io/projected/eb73b966-fd38-499c-a018-d28ad9acda92-kube-api-access-slztw\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-292ft\" (UID: \"eb73b966-fd38-499c-a018-d28ad9acda92\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.577262 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.577288 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-69gfj\" (UniqueName: \"kubernetes.io/projected/eda25b03-4fb3-4ace-803c-1d1800196995-kube-api-access-69gfj\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.577690 4710 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.577745 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert podName:eda25b03-4fb3-4ace-803c-1d1800196995 nodeName:}" failed. No retries permitted until 2025-10-09 09:17:48.077731535 +0000 UTC m=+791.567839933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert") pod "openstack-operator-controller-manager-7d6957655c-6trjl" (UID: "eda25b03-4fb3-4ace-803c-1d1800196995") : secret "webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.577903 4710 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: E1009 09:17:47.577928 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert podName:cce3973b-b375-4ea1-b907-0f46e330dfae nodeName:}" failed. No retries permitted until 2025-10-09 09:17:48.577920432 +0000 UTC m=+792.068028828 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert") pod "openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" (UID: "cce3973b-b375-4ea1-b907-0f46e330dfae") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.621290 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69gfj\" (UniqueName: \"kubernetes.io/projected/eda25b03-4fb3-4ace-803c-1d1800196995-kube-api-access-69gfj\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.681896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slztw\" (UniqueName: \"kubernetes.io/projected/eb73b966-fd38-499c-a018-d28ad9acda92-kube-api-access-slztw\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-292ft\" (UID: \"eb73b966-fd38-499c-a018-d28ad9acda92\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.687829 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.712186 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slztw\" (UniqueName: \"kubernetes.io/projected/eb73b966-fd38-499c-a018-d28ad9acda92-kube-api-access-slztw\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-292ft\" (UID: \"eb73b966-fd38-499c-a018-d28ad9acda92\") " 
pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.755745 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw"] Oct 09 09:17:47 crc kubenswrapper[4710]: I1009 09:17:47.905238 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.018513 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" event={"ID":"8437ff8a-3892-464b-963b-d5afaf9599dc","Type":"ContainerStarted","Data":"861edbd8eb4fd7067c57eeb4d4c3241dadbc93b13443fcb8670219c0980c3c43"} Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.019821 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" event={"ID":"26ad6e31-7002-4043-a971-aa507f4118bf","Type":"ContainerStarted","Data":"da528abf823d7482186c0105c8034ae4403a390fbdee360a9b025ba4c4f5d604"} Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.048150 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.091059 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.101386 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eda25b03-4fb3-4ace-803c-1d1800196995-cert\") pod \"openstack-operator-controller-manager-7d6957655c-6trjl\" (UID: \"eda25b03-4fb3-4ace-803c-1d1800196995\") " pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.158172 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.514781 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.531696 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.535220 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.542140 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.548675 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.550821 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.551972 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fc82f31_455f_4960_8538_5315e1a3a09a.slice/crio-f808aa59612f6f4e0ed3337b5f96ad2c0bb88f1eb9dd9ec2971a600ee9fa1b98 WatchSource:0}: Error finding container f808aa59612f6f4e0ed3337b5f96ad2c0bb88f1eb9dd9ec2971a600ee9fa1b98: Status 404 returned error can't find the container with id f808aa59612f6f4e0ed3337b5f96ad2c0bb88f1eb9dd9ec2971a600ee9fa1b98 Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.555054 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10de6ce0_7aa9_471d_8b90_c44fb9bb0ab6.slice/crio-4f4ab2218e09383371244d6bb9d6b95daa599d83f495d3cc6dc3a0758c8577ad WatchSource:0}: Error finding container 4f4ab2218e09383371244d6bb9d6b95daa599d83f495d3cc6dc3a0758c8577ad: Status 404 returned error can't find the container with id 4f4ab2218e09383371244d6bb9d6b95daa599d83f495d3cc6dc3a0758c8577ad Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.560788 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6f9aa15_3eb4_403a_b4fc_4af18c14d4bf.slice/crio-0fdc434090d9817fac6f3a7bddd837459500cd1f561576448d7b18a746e96cfc WatchSource:0}: Error finding container 0fdc434090d9817fac6f3a7bddd837459500cd1f561576448d7b18a746e96cfc: Status 404 returned error can't find the container with id 0fdc434090d9817fac6f3a7bddd837459500cd1f561576448d7b18a746e96cfc Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.577068 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.590480 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.598045 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 
09:17:48.601053 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.605755 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cce3973b-b375-4ea1-b907-0f46e330dfae-cert\") pod \"openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj\" (UID: \"cce3973b-b375-4ea1-b907-0f46e330dfae\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.624634 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72cf445d_90ac_4c98_a1df_3a3a4c2f0c7a.slice/crio-a602b0c079f11f448627d13862c2ac7dc9b7deb0f5d20694eb6f0ceb1809177e WatchSource:0}: Error finding container a602b0c079f11f448627d13862c2ac7dc9b7deb0f5d20694eb6f0ceb1809177e: Status 404 returned error can't find the container with id a602b0c079f11f448627d13862c2ac7dc9b7deb0f5d20694eb6f0ceb1809177e Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.642488 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.901464 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.927819 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a08f5f1_bab4_425c_b81c_b48f2d4a186b.slice/crio-49840a7c463f7153f34234bf8a13ac977d1e5268635415459382699585307c24 WatchSource:0}: Error finding container 49840a7c463f7153f34234bf8a13ac977d1e5268635415459382699585307c24: Status 404 returned error can't find the container with id 49840a7c463f7153f34234bf8a13ac977d1e5268635415459382699585307c24 Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.942315 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.946293 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e6376a2_edb7_4958_b3b3_3a6773782349.slice/crio-5fa67d3ba6956ec4b33fae9ea66552d6fc0ce19d5aa077f761a0587c86f45d3f WatchSource:0}: Error finding container 5fa67d3ba6956ec4b33fae9ea66552d6fc0ce19d5aa077f761a0587c86f45d3f: Status 404 returned error can't find the container with id 5fa67d3ba6956ec4b33fae9ea66552d6fc0ce19d5aa077f761a0587c86f45d3f Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.947647 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.956456 4710 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8dd2f89_b87d_4669_8c6e_7c8035b6fcd3.slice/crio-cbed8397365026097cc0474899eae398baf11704077153b49257e0120d13c044 WatchSource:0}: Error finding container cbed8397365026097cc0474899eae398baf11704077153b49257e0120d13c044: Status 404 returned error can't find the container with id cbed8397365026097cc0474899eae398baf11704077153b49257e0120d13c044 Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.956662 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6"] Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.964682 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.969761 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaddc94b4_bdbb_4a05_993d_5a7ac2bb3e19.slice/crio-0241565517e5bef189804262b6b5a3a1719a9dc8d2b616c525af10f0e9db2797 WatchSource:0}: Error finding container 0241565517e5bef189804262b6b5a3a1719a9dc8d2b616c525af10f0e9db2797: Status 404 returned error can't find the container with id 0241565517e5bef189804262b6b5a3a1719a9dc8d2b616c525af10f0e9db2797 Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.972800 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh"] Oct 09 09:17:48 crc kubenswrapper[4710]: W1009 09:17:48.977190 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c55fbcc_5995_4a59_b8ae_dc8be7411fa8.slice/crio-a4aed253faa861946c1d95ccc5f87c8047664f8c0817254c1b8396c7ecd5a0fe WatchSource:0}: Error finding container a4aed253faa861946c1d95ccc5f87c8047664f8c0817254c1b8396c7ecd5a0fe: Status 404 returned error can't find the container with id a4aed253faa861946c1d95ccc5f87c8047664f8c0817254c1b8396c7ecd5a0fe Oct 09 09:17:48 crc kubenswrapper[4710]: I1009 09:17:48.980368 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft"] Oct 09 09:17:48 crc kubenswrapper[4710]: E1009 09:17:48.985381 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f5cgr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-67cfc6749b-t69gh_openstack-operators(2c55fbcc-5995-4a59-b8ae-dc8be7411fa8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:48 crc kubenswrapper[4710]: E1009 09:17:48.985605 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-92lks,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-69fdcfc5f5-nmmkl_openstack-operators(addc94b4-bdbb-4a05-993d-5a7ac2bb3e19): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:48.995622 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh"] Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:48.996466 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f2sdx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
swift-operator-controller-manager-db6d7f97b-rh8n6_openstack-operators(9f1f2915-bd9f-496f-a513-e1fd022ee463): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.009888 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl"] Oct 09 09:17:49 crc kubenswrapper[4710]: W1009 09:17:49.012536 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84815f80_0c57_4246_abe3_7c54bd77d1c1.slice/crio-06018d01513d0a8758b9fc070883bc8dea6ad75d06b3b3918368da21259e7476 WatchSource:0}: Error finding container 06018d01513d0a8758b9fc070883bc8dea6ad75d06b3b3918368da21259e7476: Status 404 returned error can't find the container with id 06018d01513d0a8758b9fc070883bc8dea6ad75d06b3b3918368da21259e7476 Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.014992 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl"] Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.016377 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8mgnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-656bcbd775-m95bh_openstack-operators(84815f80-0c57-4246-abe3-7c54bd77d1c1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.024759 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-slztw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-292ft_openstack-operators(eb73b966-fd38-499c-a018-d28ad9acda92): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.026622 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" podUID="eb73b966-fd38-499c-a018-d28ad9acda92" Oct 09 09:17:49 crc kubenswrapper[4710]: W1009 09:17:49.036052 4710 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeda25b03_4fb3_4ace_803c_1d1800196995.slice/crio-3924fbb2b559af1daca69131307596831d4512ec0b78d381959a3107b3a5543d WatchSource:0}: Error finding container 3924fbb2b559af1daca69131307596831d4512ec0b78d381959a3107b3a5543d: Status 404 returned error can't find the container with id 3924fbb2b559af1daca69131307596831d4512ec0b78d381959a3107b3a5543d Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.057468 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" event={"ID":"5fc82f31-455f-4960-8538-5315e1a3a09a","Type":"ContainerStarted","Data":"f808aa59612f6f4e0ed3337b5f96ad2c0bb88f1eb9dd9ec2971a600ee9fa1b98"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.061763 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" event={"ID":"fa97dde8-95a7-4c4b-820d-d889545d79d5","Type":"ContainerStarted","Data":"39bfbb179074f2508b6fe56f5e237274fcc56d56c9ef6fb0495ddfe0527f63c8"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.065773 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" event={"ID":"51113172-27cd-47a3-8bc2-b751cb1654f7","Type":"ContainerStarted","Data":"cb8df56b6ca4893e19bf39e6fb87d964a49605d0bb23e7f3244ce51e5f47b225"} Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.067289 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8qc8s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7f554bff7b-759pl_openstack-operators(94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.070533 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" event={"ID":"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3","Type":"ContainerStarted","Data":"cbed8397365026097cc0474899eae398baf11704077153b49257e0120d13c044"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.080100 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" event={"ID":"84815f80-0c57-4246-abe3-7c54bd77d1c1","Type":"ContainerStarted","Data":"06018d01513d0a8758b9fc070883bc8dea6ad75d06b3b3918368da21259e7476"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.081944 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" event={"ID":"4a08f5f1-bab4-425c-b81c-b48f2d4a186b","Type":"ContainerStarted","Data":"49840a7c463f7153f34234bf8a13ac977d1e5268635415459382699585307c24"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.087518 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj"] Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.092939 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" event={"ID":"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19","Type":"ContainerStarted","Data":"0241565517e5bef189804262b6b5a3a1719a9dc8d2b616c525af10f0e9db2797"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.098191 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" event={"ID":"2e6376a2-edb7-4958-b3b3-3a6773782349","Type":"ContainerStarted","Data":"5fa67d3ba6956ec4b33fae9ea66552d6fc0ce19d5aa077f761a0587c86f45d3f"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.103750 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" event={"ID":"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf","Type":"ContainerStarted","Data":"0fdc434090d9817fac6f3a7bddd837459500cd1f561576448d7b18a746e96cfc"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.110393 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" 
event={"ID":"f78f287b-b34d-40c0-ad99-caaf90bc2ae7","Type":"ContainerStarted","Data":"c2f7c88f28417d2a36a6b24dca6ac9e927593ab26367306d287549f1cb82f079"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.116978 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" event={"ID":"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a","Type":"ContainerStarted","Data":"a602b0c079f11f448627d13862c2ac7dc9b7deb0f5d20694eb6f0ceb1809177e"} Oct 09 09:17:49 crc kubenswrapper[4710]: W1009 09:17:49.121631 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcce3973b_b375_4ea1_b907_0f46e330dfae.slice/crio-629263c58986b8ef90294de403df28f4e39d89a7a0746fd304e4326d7537ace2 WatchSource:0}: Error finding container 629263c58986b8ef90294de403df28f4e39d89a7a0746fd304e4326d7537ace2: Status 404 returned error can't find the container with id 629263c58986b8ef90294de403df28f4e39d89a7a0746fd304e4326d7537ace2 Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.125042 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" event={"ID":"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6","Type":"ContainerStarted","Data":"4f4ab2218e09383371244d6bb9d6b95daa599d83f495d3cc6dc3a0758c8577ad"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.130048 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" event={"ID":"9f1f2915-bd9f-496f-a513-e1fd022ee463","Type":"ContainerStarted","Data":"301f957b715e24fe5a534054c541905973f035ce78870f2de55a4f7cc86c25be"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.132102 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" event={"ID":"1438d002-6055-453b-8a7a-c83888b37429","Type":"ContainerStarted","Data":"9c8babb8b4a945ca8072a0111025e8744d9407e9f6bcbd21fb1f970fc164d45d"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.139139 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" event={"ID":"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8","Type":"ContainerStarted","Data":"a4aed253faa861946c1d95ccc5f87c8047664f8c0817254c1b8396c7ecd5a0fe"} Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.142306 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent@sha256:03b4f3db4b373515f7e4095984b97197c05a14f87b2a0a525eb5d7be1d7bda66,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:6722a752fb7cbffbae811f6ad6567120fbd4ebbe8c38a83ec2df02850a3276bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api@sha256:2115452234aedb505ed4efc6cd9b9a4ce3b9809aa7d0128d8fbeeee84dad1a69,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator@sha256:50597a8eaa6c4383f357574dcab8358b698729797b4156d932985a08ab86b7cd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener@sha256:cb4997d62c7b2534233a676cb92e19cf85dda07e2fb9fa642c28aab30489f69a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier@sha256:1ccbf3f6cf24c9ee91bed71467491e22b8cb4b95bce90250f4174fae936b0fa1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24@sha256:e91d58021b54c46883595ff66be65882de54abdb3be2ca53c4162b20d18b5f48,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:cbe345acb37e57986ecf6685d28c72d0e639bdb493a18e9d3ba947d6c3a16384,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener@sha256:e7dcc3bf23d5e0393ac173e3c43d4ae85f4613a4fd16b3c147dc32ae491d49bf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker@sha256:2a1a8b582c6e4cc31081bd8b0887acf45e31c1d14596c4e361d27d08fef0debf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:86daeb9c834bfcedb533086dff59a6b5b6e832b94ce2a9116337f8736bb80032,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute@sha256:5d4fdf424fad33a3650163e9e7423f92e97de3305508c2b7c6435822e0313189,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi@sha256:6d28de018f6e1672e775a75735e3bc16b63da41acd8fb5196ee0b06856c07133,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter@sha256:7211a617ec657701ca819aa0ba28e1d5750f5bf2c1391b755cc4a48cc360b0fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification@sha256:c5fc9b72fc593bcf3b569c7ed24a256448eb1afab1504e668a3822e978be1306,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core@sha2
56:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup@sha256:88b99249f15470f359fb554f7f3a56974b743f4655e3f0c982c0260f75a67697,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler@sha256:e861d66785047d39eb68d9bac23e3f57ac84d9bd95593502d9b3b913b99fd1a4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume@sha256:b95f09bf3d259f9eacf3b63931977483f5c3c332f49b95ee8a69d8e3fb71d082,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api@sha256:6fc7801c0d18d41b9f11484b1cdb342de9cebd93072ec2205dbe40945715184f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9@sha256:d4d824b80cbed683543d9e8c7045ac97e080774f45a5067ccbca26404e067821,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central@sha256:182ec75938d8d3fb7d8f916373368add24062fec90489aa57776a81d0b36ea20,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns@sha256:9507ba5ab74cbae902e2dc07f89c7b3b5b76d8079e444365fe0eee6000fd7aaa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer@sha256:17db080dcc4099f8a20aa0f238b6bca5c104672ae46743adeab9d1637725ecaa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound@sha256:fd55cf3d73bfdc518419c9ba0b0cbef275140ae2d3bd0342a7310f81d57c2d78,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker@sha256:d164a9bd383f50df69fc22e7422f4650cd5076c90ed19278fc0f04e54345a63d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr@sha256:6beffe7d0bd75f9d1f495aeb7ab2334a2414af2c581d4833363df8441ed01018,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid@sha256:261e76f60c6bc6b172dc3608504552c63e83358a4fa3c0952a671544d83aa83f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler@sha256:581b65b646301e0fcb07582150ba63438f1353a85bf9acf1eb2acb4ce71c58bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron@sha256:2308c7b6c3d0aabbadfc9a06d84d67d2243f27fe8eed740ee96b1ce910203f62,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd@sha256:02d33f59749441cd5751c319e9d7cff97ab1004844c0e992650d340c6e8fbf43,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/opensta
ck-neutron-dhcp-agent@sha256:9cf0ca292340f1f978603955ef682effbf24316d6e2376b1c89906d84c3f06d0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn@sha256:c3e651f35b930bcf1a3084be8910c2f3f34d22a976c5379cf518a68d9994bfa7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent@sha256:58f678016d7f6c8fe579abe886fd138ef853642faa6766ca60639feac12d82ac,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent@sha256:46f92909153aaf03a585374b77d103c536509747e3270558d9a533295c46a7c5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter@sha256:39c642b2b337e38c18e80266fb14383754178202f40103646337722a594d984c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent@sha256:7fe367f51638c5c302fd3f8e66a31b09cb3b11519a7f72ef142b6c6fe8b91694,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter@sha256:d339ba049bbd1adccb795962bf163f5b22fd84dea865d88b9eb525e46247d6bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:9ebf424d4107275a2e3f21f7a18ef257ff2f97c1298109ac7c802a5a4f4794f2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api@sha256:4fcbe0d9a3c845708ecc32102ad4abbcbd947d87e5cf91f186de75b5d84ec681,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn@sha256:58a4e9a4dea86635c93ce37a2bb3c60ece62b3d656f6ee6a8845347cbb3e90fd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine@sha256:6f2b843bc9f4ceb1ee873972d69e6bae6e1dbd378b486995bc3697d8bcff6339,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon@sha256:03b4bb79b71d5ca7792d19c4c0ee08a5e5a407ad844c087305c42dd909ee7490,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached@sha256:773daada6402d9cad089cdc809d6c0335456d057ac1a25441ab5d82add2f70f4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis@sha256:7323406a63fb3fdbb3eea4da0f7e8ed89c94c9bd0ad5ecd6c18fa4a4c2c550c4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api@sha256:7ae82068011e2d2e5ddc88c943fd32ff4a11902793e7a1df729811b2e27122a0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor@sha256:0c762c15d9d98d39cc9dc3d1f9a70f9188fef58d4e2f3b0c69c896cab8da5e48,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector@sha256:febf65561eeef5b36b70d0d65ee83f6451e43ec97bfab4d826e14215da6ff19b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-ante
lope-centos9/openstack-ironic-neutron-agent@sha256:b8aadfc3d547c5ef1e27fcb573d4760cf8c2f2271eefe1793c35a0d46b640837,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe@sha256:ecc91fd5079ee6d0c6ae1b11e97da790e33864d0e1930e574f959da2bddfa59a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent@sha256:2e981e93f99c929a3f04e5e41c8f645d44d390a9aeee3c5193cce7ec2edcbf3a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone@sha256:1e5714637b6e1a24c2858fe6d9bbb3f00bc61d69ad74a657b1c23682bf4cb2b7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api@sha256:35b8dcf27dc3b67f3840fa0e693ff312f74f7e22c634dff206a5c4d0133c716c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler@sha256:e109e4863e05e803dbfe04917756fd52231c560c65353170a2000be6cc2bb53d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share@sha256:6df0bebd9318ce11624413249e7e9781311638f276f8877668d3b382fe90e62f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:56b75d97f4a48c8cf58b3a7c18c43618efb308bf0188124f6301142e61299b0c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils@sha256:a51ed62767206067aa501142dbf01f20b3d65325d30faf1b4d6424d5b17dfba5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api@sha256:592e3cd32d3cc97a69093ad905b449aa374ffbb1b2644b738bb6c1434476d1f6,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute@sha256:5f179b847f2dc32d9110b8f2be9fe65f1aeada1e18105dffdaf052981215d844,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor@sha256:9596452e283febbe08204d0ef0fd1992af3395d0969f7ac76663ed7c8be5b4d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy@sha256:d61005a10bef1b37762a8a41e6755c1169241e36cc5f92886bca6f4f6b9c381a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler@sha256:e6a4335bcbeed3cd3e73ac879f754e314761e4a417a67539ca88e96a79346328,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api@sha256:97d88fc53421b699fc91983313d7beec4a0f177089e95bdf5ba15c3f521db9a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-
health-manager@sha256:5365e5c9c3ad2ede1b6945255b2cc6b009d642c39babdf25e0655282cfa646fe,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping@sha256:5b55795d774e0ea160ff8a7fd491ed41cf2d93c7d821694abb3a879eaffcefeb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog@sha256:26e955c46a6063eafcfeb79430bf3d9268dbe95687c00e63a624b3ec5a846f5a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker@sha256:58939baa18ab09e2b24996c5f3665ae52274b781f661ea06a67c991e9a832d5a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:b8bff6857fec93c3c1521f1a8c23de21bcb86fc0f960972e81f6c3f95d4185be,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather@sha256:943eee724277e252795909137538a553ef5284c8103ad01b9be7b0138c66d14d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter@sha256:ecd56e6733c475f2d441344fd98f288c3eac0261ba113695fec7520a954ccbc7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi@sha256:d97b08fd421065c8c33a523973822ac468500cbe853069aa9214393fbda7a908,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:d76f7d6620930cc2e9ac070492bbeb525f83ce5ff4947463e3784bf1ce04a857,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:289dea3beea1cd4405895fc42e44372b35e4a941e31c59e102c333471a3ca9b7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:9b19894fa67a81bf8ba4159b55b49f38877c670aeb97e2021c341cef2a9294e4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:ea164961ad30453ad0301c6b73364e1f1024f689634c88dd98265f9c7048e31d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server@sha256:6f9f2ea45f0271f6da8eb05a5f74cf5ce6769479346f5c2f407ee6f31a9c7ff3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:59448516174fc3bab679b9a8dd62cb9a9d16b5734aadbeb98e960e3b7c79bd22,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:adcdeb8ecd601fb03c3b0901d5b5111af2ca48f7dd443e22224db6daaf08f5d0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account@sha256:2bf32d9b95899d7637dfe19d07cf1ecc9a06593984faff57a3c0dce060012edb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container@sha256:7a452cd18b64d522e8a1e25bdcea543e9fe5f5b76e1c5e044c2b5334e06a326b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/
podified-antelope-centos9/openstack-swift-object@sha256:6a46aa13aa359b8e782a22d67db42db02bbf2bb7e35df4b684ac1daeda38cde3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server@sha256:f6824854bea6b2acbb00c34639799b4744818d4adbdd40e37dc5088f9ae18d58,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all@sha256:a66d2fdc21f25c690f02e643d2666dbe7df43a64cd55086ec33d6755e6d809b9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api@sha256:e0e84e3fb8f9f12e9280d3d28b415639abfad5b5e46787a61b42beb97f32c76e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier@sha256:bd5376b807eca8a409ea663906d9a990cd95a109b8f1f2c56c06f9201be1f6ca,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine@sha256:366de9fadc79d3ea77de93ef1c81e4b8525764f0704e3f30d1a8afa2745c8166,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xrfrf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj_openstack-operators(cce3973b-b375-4ea1-b907-0f46e330dfae): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.153036 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" 
event={"ID":"21d3fd5d-1f17-45d0-bf73-59fdc7211820","Type":"ContainerStarted","Data":"c9cebad398c75d9018ab8574dfd0a93832831f60ff3c4636815876c420ec8da1"} Oct 09 09:17:49 crc kubenswrapper[4710]: I1009 09:17:49.155223 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" event={"ID":"3b555e01-0210-431d-83ab-97ebcc53a68b","Type":"ContainerStarted","Data":"33d9a3d0e30cefe090f1ed5d550468f6f1aab197b9ae6773dc94913df9409a94"} Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.204216 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" podUID="2c55fbcc-5995-4a59-b8ae-dc8be7411fa8" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.223967 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" podUID="addc94b4-bdbb-4a05-993d-5a7ac2bb3e19" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.368609 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" podUID="9f1f2915-bd9f-496f-a513-e1fd022ee463" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.383322 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" podUID="94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.392714 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" podUID="84815f80-0c57-4246-abe3-7c54bd77d1c1" Oct 09 09:17:49 crc kubenswrapper[4710]: E1009 09:17:49.514213 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" podUID="cce3973b-b375-4ea1-b907-0f46e330dfae" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.172573 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" event={"ID":"cce3973b-b375-4ea1-b907-0f46e330dfae","Type":"ContainerStarted","Data":"d665cc62b962ff8e6639a84dd3049c0696011a7b1057ea8545f7d6eb03d2142a"} Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.172626 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" event={"ID":"cce3973b-b375-4ea1-b907-0f46e330dfae","Type":"ContainerStarted","Data":"629263c58986b8ef90294de403df28f4e39d89a7a0746fd304e4326d7537ace2"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.174765 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" podUID="cce3973b-b375-4ea1-b907-0f46e330dfae" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.175532 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" event={"ID":"eda25b03-4fb3-4ace-803c-1d1800196995","Type":"ContainerStarted","Data":"7eabe4f8935695ce4463e9906d1c0fff08033953d086e568cbb50d5ad96d697a"} Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.175568 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" event={"ID":"eda25b03-4fb3-4ace-803c-1d1800196995","Type":"ContainerStarted","Data":"597cd197414800f87c4f055e425ed25c99c538fa2763005505a966c8b6e1188a"} Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.175578 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" event={"ID":"eda25b03-4fb3-4ace-803c-1d1800196995","Type":"ContainerStarted","Data":"3924fbb2b559af1daca69131307596831d4512ec0b78d381959a3107b3a5543d"} Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.176141 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.179272 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" event={"ID":"9f1f2915-bd9f-496f-a513-e1fd022ee463","Type":"ContainerStarted","Data":"9ee9f017e6975fa0693970850896f2401286a6b74b7f77984ddb2239874fb08f"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.185554 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" podUID="9f1f2915-bd9f-496f-a513-e1fd022ee463" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.189147 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" event={"ID":"eb73b966-fd38-499c-a018-d28ad9acda92","Type":"ContainerStarted","Data":"4e647b63a97ad937305c9e63bbe1a528c2f773d0182b7317c50a400cd6b2d185"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.201670 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" podUID="eb73b966-fd38-499c-a018-d28ad9acda92" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.207929 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" event={"ID":"84815f80-0c57-4246-abe3-7c54bd77d1c1","Type":"ContainerStarted","Data":"0d0148cf17f313aacb41e62fc7346c229e8f2c8df77a35acbde30a500fa053d7"} Oct 09 09:17:50 
crc kubenswrapper[4710]: E1009 09:17:50.209997 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" podUID="84815f80-0c57-4246-abe3-7c54bd77d1c1" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.214099 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" event={"ID":"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e","Type":"ContainerStarted","Data":"53bbe176bb067b10a6a64c222de14b4da6013cbe764f6e1252f1c698b2d83791"} Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.214130 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" event={"ID":"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e","Type":"ContainerStarted","Data":"a5cc219809b2cc7bf66b09b8e652790e44c3b033c3e689cdbf15082e92be7ec9"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.215202 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" podUID="94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.216401 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" event={"ID":"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19","Type":"ContainerStarted","Data":"3b34a0206a6d3770c8b0f237ff8f11b86764b1ddcf1b1cf6c0630937b6b6bc83"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.225696 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" podUID="addc94b4-bdbb-4a05-993d-5a7ac2bb3e19" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.244305 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" event={"ID":"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8","Type":"ContainerStarted","Data":"79885b57ac10e0b905e2ca843ca5da2a5061b5d70e7b285193ef160e54bf576d"} Oct 09 09:17:50 crc kubenswrapper[4710]: E1009 09:17:50.246572 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" podUID="2c55fbcc-5995-4a59-b8ae-dc8be7411fa8" Oct 09 09:17:50 crc kubenswrapper[4710]: I1009 09:17:50.252295 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" podStartSLOduration=3.25228208 
podStartE2EDuration="3.25228208s" podCreationTimestamp="2025-10-09 09:17:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:17:50.245382059 +0000 UTC m=+793.735490456" watchObservedRunningTime="2025-10-09 09:17:50.25228208 +0000 UTC m=+793.742390477" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.269792 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:5cfb2ae1092445950b39dd59caa9a8c9367f42fb8353a8c3848d3bc729f24492\\\"\"" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" podUID="84815f80-0c57-4246-abe3-7c54bd77d1c1" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.269891 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:abe978f8da75223de5043cca50278ad4e28c8dd309883f502fe1e7a9998733b0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" podUID="2c55fbcc-5995-4a59-b8ae-dc8be7411fa8" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.269972 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:a17fc270857869fd1efe5020b2a1cb8c2abbd838f08de88f3a6a59e8754ec351\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" podUID="cce3973b-b375-4ea1-b907-0f46e330dfae" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.272216 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:98a5233f0596591acdf2c6a5838b08be108787cdb6ad1995b2b7886bac0fe6ca\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" podUID="94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.272895 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:09deecf840d38ff6af3c924729cf0a9444bc985848bfbe7c918019b88a6bc4d7\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" podUID="addc94b4-bdbb-4a05-993d-5a7ac2bb3e19" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.277515 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:4b4a17fe08ce00e375afaaec6a28835f5c1784f03d11c4558376ac04130f3a9e\\\"\"" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" podUID="9f1f2915-bd9f-496f-a513-e1fd022ee463" Oct 09 09:17:51 crc kubenswrapper[4710]: E1009 09:17:51.280261 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" podUID="eb73b966-fd38-499c-a018-d28ad9acda92" Oct 09 09:17:58 crc kubenswrapper[4710]: I1009 09:17:58.165847 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7d6957655c-6trjl" Oct 09 09:18:01 crc kubenswrapper[4710]: E1009 09:18:01.847964 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9" Oct 09 09:18:01 crc kubenswrapper[4710]: E1009 09:18:01.848620 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v8rsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-7b7fb68549-7t5cw_openstack-operators(26ad6e31-7002-4043-a971-aa507f4118bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:18:02 crc kubenswrapper[4710]: E1009 09:18:02.255322 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167" Oct 09 09:18:02 crc kubenswrapper[4710]: E1009 09:18:02.255857 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mk8v2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-f9fb45f8f-6lbkw_openstack-operators(72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.122606 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.122876 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g4b4k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-84b9b84486-4szsc_openstack-operators(10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.538091 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:551b59e107c9812f7ad7aa06577376b0dcb58ff9498a41d5d5273e60e20ba7e4" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.538338 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:551b59e107c9812f7ad7aa06577376b0dcb58ff9498a41d5d5273e60e20ba7e4,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z45tr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-79db49b9fb-vnvl6_openstack-operators(4a08f5f1-bab4-425c-b81c-b48f2d4a186b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.772221 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" podUID="10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.941642 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" podUID="4a08f5f1-bab4-425c-b81c-b48f2d4a186b" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.957865 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" podUID="26ad6e31-7002-4043-a971-aa507f4118bf" Oct 09 09:18:03 crc kubenswrapper[4710]: E1009 09:18:03.959692 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" podUID="72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.389684 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" 
event={"ID":"3b555e01-0210-431d-83ab-97ebcc53a68b","Type":"ContainerStarted","Data":"b276961ff2a11318ed73148b8c42bffcc6d114ffc5d1a5774c4ed968c94df173"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.395102 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" event={"ID":"fa97dde8-95a7-4c4b-820d-d889545d79d5","Type":"ContainerStarted","Data":"5fdc321df950c0b3dcc9fa2cc2e30a9c4c2388f5b550b845829991c1e670974e"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.403448 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" event={"ID":"1438d002-6055-453b-8a7a-c83888b37429","Type":"ContainerStarted","Data":"fe76205558f376f8eafb42b6fe6ebf7084421cf048afd8359ae71d6ef5b05afa"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.418208 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" event={"ID":"26ad6e31-7002-4043-a971-aa507f4118bf","Type":"ContainerStarted","Data":"f7ae471f238103e57c3da7574c6cff077911c27ba293f5be3ac847dd89aa3e39"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.424034 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" event={"ID":"5fc82f31-455f-4960-8538-5315e1a3a09a","Type":"ContainerStarted","Data":"f49bc759912c5ffa288a88b6e3a3b8825f61f2bb3a16543cae6d040e6ce85c3b"} Oct 09 09:18:04 crc kubenswrapper[4710]: E1009 09:18:04.425857 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" podUID="26ad6e31-7002-4043-a971-aa507f4118bf" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.445058 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" event={"ID":"8437ff8a-3892-464b-963b-d5afaf9599dc","Type":"ContainerStarted","Data":"53fb69f8e343326492131beeec1a8e2adf4dfcd19aba80e3db7cc133c14b2463"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.459067 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" event={"ID":"21d3fd5d-1f17-45d0-bf73-59fdc7211820","Type":"ContainerStarted","Data":"0db0c5c4708459b3d260edb845f072aa3f856f28aae4a95b6e4728aef106b032"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.463659 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" event={"ID":"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf","Type":"ContainerStarted","Data":"0dcd072c1b9d68f4b6de01d4e3d3eb882bbf8ca3b335aeb571212450d4840d45"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.479144 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" event={"ID":"f78f287b-b34d-40c0-ad99-caaf90bc2ae7","Type":"ContainerStarted","Data":"d357533ef6778720ef690d127cd63c27efd231f04c0981a064769fb02ed95439"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.479213 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" event={"ID":"f78f287b-b34d-40c0-ad99-caaf90bc2ae7","Type":"ContainerStarted","Data":"46371cbee8a9ec4d2f1eee6ae068f448e0f06e5b61390581f198c4c22c621955"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.479251 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.490393 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" event={"ID":"51113172-27cd-47a3-8bc2-b751cb1654f7","Type":"ContainerStarted","Data":"4a33112f7620e6826a7fc192037d5cd222be5a71b244c09bca625053151a918c"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.501425 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" event={"ID":"4a08f5f1-bab4-425c-b81c-b48f2d4a186b","Type":"ContainerStarted","Data":"f5cb60ea73a09462ec39be0b40c54496f4ea1875603e1b6415fb73a489b5bd2d"} Oct 09 09:18:04 crc kubenswrapper[4710]: E1009 09:18:04.503220 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:551b59e107c9812f7ad7aa06577376b0dcb58ff9498a41d5d5273e60e20ba7e4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" podUID="4a08f5f1-bab4-425c-b81c-b48f2d4a186b" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.510785 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" event={"ID":"2e6376a2-edb7-4958-b3b3-3a6773782349","Type":"ContainerStarted","Data":"bbdf5ce72243fefaf0808ce9c80b21f08cd3c41413e2ce149a3856ebac147fe9"} Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.512303 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" event={"ID":"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6","Type":"ContainerStarted","Data":"412f5a20098b08610c6caa5d9fced5629652641ee2a2ff255e45aa9b60516d23"} Oct 09 09:18:04 crc kubenswrapper[4710]: E1009 09:18:04.525661 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0\\\"\"" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" podUID="10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.534414 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" podStartSLOduration=3.500031318 podStartE2EDuration="18.53439253s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.552302825 +0000 UTC m=+792.042411222" lastFinishedPulling="2025-10-09 09:18:03.586664037 +0000 UTC m=+807.076772434" observedRunningTime="2025-10-09 09:18:04.52115527 +0000 UTC m=+808.011263666" watchObservedRunningTime="2025-10-09 09:18:04.53439253 +0000 UTC m=+808.024500928" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.537366 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" event={"ID":"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a","Type":"ContainerStarted","Data":"b5663d498d56d0df05c5be287aebb8cb3a007ec0cb84922f5dfa21e339d20736"} Oct 09 09:18:04 crc kubenswrapper[4710]: E1009 09:18:04.553710 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" podUID="72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a" Oct 09 09:18:04 crc kubenswrapper[4710]: I1009 09:18:04.554207 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" event={"ID":"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3","Type":"ContainerStarted","Data":"df9db0fa4b9c3a3e666f9602ec07249a93cba296e037a86f6256b4070443268f"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.567938 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" event={"ID":"51113172-27cd-47a3-8bc2-b751cb1654f7","Type":"ContainerStarted","Data":"ccebc70d81605fdc7e156718b7824bf14584b51ab8dee00d589909ae6ff5b52e"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.568654 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.570255 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" event={"ID":"b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3","Type":"ContainerStarted","Data":"4d06f3ed097e2a463435f7679baf1151834e42cb9472d4e33e6db5352d6730b4"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.570815 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.573323 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" event={"ID":"3b555e01-0210-431d-83ab-97ebcc53a68b","Type":"ContainerStarted","Data":"647441aa3e8f21b082693c3394d38a4f96d0ccc32332937eab6f72315000c481"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.573734 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.575539 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" event={"ID":"5fc82f31-455f-4960-8538-5315e1a3a09a","Type":"ContainerStarted","Data":"16afc2b38668c440df404dde0ee4dfbaed60b514ecf75f9fdc50565049394762"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.575908 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.577214 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" 
event={"ID":"fa97dde8-95a7-4c4b-820d-d889545d79d5","Type":"ContainerStarted","Data":"fd67bf384216d5aaac5da2710b8751506bc822e231234db7903800a96a2e18fd"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.577598 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.588474 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" event={"ID":"d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf","Type":"ContainerStarted","Data":"b71948036d4c46d989384d9d30ab30738591fee0d8bbdbf3ca71ad35dfeb4766"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.588514 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" podStartSLOduration=4.615280326 podStartE2EDuration="19.588503694s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.610383795 +0000 UTC m=+792.100492192" lastFinishedPulling="2025-10-09 09:18:03.583607163 +0000 UTC m=+807.073715560" observedRunningTime="2025-10-09 09:18:05.586571902 +0000 UTC m=+809.076680299" watchObservedRunningTime="2025-10-09 09:18:05.588503694 +0000 UTC m=+809.078612091" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.589423 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.599937 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" event={"ID":"8437ff8a-3892-464b-963b-d5afaf9599dc","Type":"ContainerStarted","Data":"ef8bf601fdc1e8d027947b4ed688b74b7f7cc76cbcafe5e8404782c43edb87fb"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.600267 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.602044 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" event={"ID":"1438d002-6055-453b-8a7a-c83888b37429","Type":"ContainerStarted","Data":"7839ba76a22f9523069ee44350169893b113c37772304a6f6d1f0062aef54dc0"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.602423 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.606248 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" event={"ID":"21d3fd5d-1f17-45d0-bf73-59fdc7211820","Type":"ContainerStarted","Data":"8aa843e46989b1ddf5bd6ffee5ec16f92c74116f652866b35dbac65742d29a35"} Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.606496 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.616190 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" podStartSLOduration=4.6156998080000005 podStartE2EDuration="19.616182126s" 
podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.582285752 +0000 UTC m=+792.072394149" lastFinishedPulling="2025-10-09 09:18:03.58276807 +0000 UTC m=+807.072876467" observedRunningTime="2025-10-09 09:18:05.613408417 +0000 UTC m=+809.103516814" watchObservedRunningTime="2025-10-09 09:18:05.616182126 +0000 UTC m=+809.106290514" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.626732 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" event={"ID":"2e6376a2-edb7-4958-b3b3-3a6773782349","Type":"ContainerStarted","Data":"742670c7d8b85372d1cf9e1b0e28c257f10ccfd1bec6a9b5b8734073effff085"} Oct 09 09:18:05 crc kubenswrapper[4710]: E1009 09:18:05.631593 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:551b59e107c9812f7ad7aa06577376b0dcb58ff9498a41d5d5273e60e20ba7e4\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" podUID="4a08f5f1-bab4-425c-b81c-b48f2d4a186b" Oct 09 09:18:05 crc kubenswrapper[4710]: E1009 09:18:05.631677 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:c487a793648e64af2d64df5f6efbda2d4fd586acd7aee6838d3ec2b3edd9efb9\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" podUID="26ad6e31-7002-4043-a971-aa507f4118bf" Oct 09 09:18:05 crc kubenswrapper[4710]: E1009 09:18:05.631780 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:3cc6bba71197ddf88dd4ba1301542bacbc1fe12e6faab2b69e6960944b3d74a0\\\"\"" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" podUID="10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6" Oct 09 09:18:05 crc kubenswrapper[4710]: E1009 09:18:05.631861 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:47278ed28e02df00892f941763aa0d69547327318e8a983e07f4577acd288167\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" podUID="72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.677910 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" podStartSLOduration=4.721167275 podStartE2EDuration="19.677896248s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.625969334 +0000 UTC m=+792.116077732" lastFinishedPulling="2025-10-09 09:18:03.582698309 +0000 UTC m=+807.072806705" observedRunningTime="2025-10-09 09:18:05.643310791 +0000 UTC m=+809.133419188" watchObservedRunningTime="2025-10-09 09:18:05.677896248 +0000 UTC m=+809.168004645" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.680842 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" podStartSLOduration=4.717249304 podStartE2EDuration="19.68083548s" 
podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.633563414 +0000 UTC m=+792.123671800" lastFinishedPulling="2025-10-09 09:18:03.597149579 +0000 UTC m=+807.087257976" observedRunningTime="2025-10-09 09:18:05.677801429 +0000 UTC m=+809.167909826" watchObservedRunningTime="2025-10-09 09:18:05.68083548 +0000 UTC m=+809.170943877" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.703467 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" podStartSLOduration=5.090428351 podStartE2EDuration="19.703458329s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.969677577 +0000 UTC m=+792.459785975" lastFinishedPulling="2025-10-09 09:18:03.582707556 +0000 UTC m=+807.072815953" observedRunningTime="2025-10-09 09:18:05.696994119 +0000 UTC m=+809.187102517" watchObservedRunningTime="2025-10-09 09:18:05.703458329 +0000 UTC m=+809.193566726" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.746910 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" podStartSLOduration=5.132103434 podStartE2EDuration="19.746883804s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.968576281 +0000 UTC m=+792.458684677" lastFinishedPulling="2025-10-09 09:18:03.58335665 +0000 UTC m=+807.073465047" observedRunningTime="2025-10-09 09:18:05.722310397 +0000 UTC m=+809.212418794" watchObservedRunningTime="2025-10-09 09:18:05.746883804 +0000 UTC m=+809.236992201" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.749695 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" podStartSLOduration=4.72318092 podStartE2EDuration="19.749689645s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.559935047 +0000 UTC m=+792.050043443" lastFinishedPulling="2025-10-09 09:18:03.586443772 +0000 UTC m=+807.076552168" observedRunningTime="2025-10-09 09:18:05.742639991 +0000 UTC m=+809.232748379" watchObservedRunningTime="2025-10-09 09:18:05.749689645 +0000 UTC m=+809.239798042" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.774413 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" podStartSLOduration=4.02959465 podStartE2EDuration="19.774399349s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:47.838734776 +0000 UTC m=+791.328843163" lastFinishedPulling="2025-10-09 09:18:03.583539465 +0000 UTC m=+807.073647862" observedRunningTime="2025-10-09 09:18:05.771905445 +0000 UTC m=+809.262013843" watchObservedRunningTime="2025-10-09 09:18:05.774399349 +0000 UTC m=+809.264507746" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.881239 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" podStartSLOduration=4.390995134 podStartE2EDuration="19.881200117s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.094370815 +0000 UTC m=+791.584479213" lastFinishedPulling="2025-10-09 09:18:03.584575799 +0000 UTC m=+807.074684196" observedRunningTime="2025-10-09 09:18:05.87303966 +0000 UTC 
m=+809.363148057" watchObservedRunningTime="2025-10-09 09:18:05.881200117 +0000 UTC m=+809.371308514" Oct 09 09:18:05 crc kubenswrapper[4710]: I1009 09:18:05.897165 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" podStartSLOduration=4.889539413 podStartE2EDuration="19.897145254s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.575142132 +0000 UTC m=+792.065250529" lastFinishedPulling="2025-10-09 09:18:03.582747973 +0000 UTC m=+807.072856370" observedRunningTime="2025-10-09 09:18:05.890160544 +0000 UTC m=+809.380268941" watchObservedRunningTime="2025-10-09 09:18:05.897145254 +0000 UTC m=+809.387253651" Oct 09 09:18:06 crc kubenswrapper[4710]: I1009 09:18:06.638057 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.622212 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.624190 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.627041 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.780357 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.780524 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.780551 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l59n\" (UniqueName: \"kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.882417 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.882485 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l59n\" (UniqueName: \"kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 
09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.882547 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.882897 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.883274 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.908059 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l59n\" (UniqueName: \"kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n\") pod \"certified-operators-lhjhh\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:08 crc kubenswrapper[4710]: I1009 09:18:08.951212 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.564885 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.680366 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" event={"ID":"9f1f2915-bd9f-496f-a513-e1fd022ee463","Type":"ContainerStarted","Data":"e7c11dc88ac337a11cfdb8ea660db3cfe376ddc0d2163383e9307c4fd937dd55"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.680554 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.686149 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" event={"ID":"eb73b966-fd38-499c-a018-d28ad9acda92","Type":"ContainerStarted","Data":"0163988a2d5132b5de31eaaa2ccb88a9b8bdb3cfc04cb3e1837b777459b3f0f7"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.688093 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" event={"ID":"84815f80-0c57-4246-abe3-7c54bd77d1c1","Type":"ContainerStarted","Data":"b2b2c0b0c62a7a548daa4b0bbd4b5f0eaff0dd6ff89c65033410db81ecc66bf0"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.688775 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.693719 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" event={"ID":"94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e","Type":"ContainerStarted","Data":"b6fbd961c282fc972988e320bbc87248a085c961db4064e709ac65c4d0f590bc"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.693963 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.697766 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" event={"ID":"addc94b4-bdbb-4a05-993d-5a7ac2bb3e19","Type":"ContainerStarted","Data":"376155fdfc2870d95e7edcefc10798a8fcd4b4bf6fe1a01039fbebfd1ebc5091"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.698205 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.699583 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerStarted","Data":"21b9b3c6f0015bd170c691edb0b035881dd32601be12044403335f0251e842fe"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.702417 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" event={"ID":"2c55fbcc-5995-4a59-b8ae-dc8be7411fa8","Type":"ContainerStarted","Data":"b92df4705dfcc80e1f07eebf5da9690bdc8be3a76e440cff2a1ac544fd8f2020"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.702663 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.705556 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" podStartSLOduration=3.534893605 podStartE2EDuration="25.705544017s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.993929077 +0000 UTC m=+792.484037474" lastFinishedPulling="2025-10-09 09:18:11.164579489 +0000 UTC m=+814.654687886" observedRunningTime="2025-10-09 09:18:11.701331134 +0000 UTC m=+815.191439531" watchObservedRunningTime="2025-10-09 09:18:11.705544017 +0000 UTC m=+815.195652414" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.707371 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" event={"ID":"cce3973b-b375-4ea1-b907-0f46e330dfae","Type":"ContainerStarted","Data":"ee77e785eb169afc1146ee3535dcfb82d887bf4a35ba43e11f040db1ad24dbbe"} Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.707622 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.762776 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-292ft" podStartSLOduration=2.6026721630000003 podStartE2EDuration="24.762759386s" podCreationTimestamp="2025-10-09 09:17:47 +0000 UTC" firstStartedPulling="2025-10-09 09:17:49.024582609 +0000 UTC m=+792.514691006" 
lastFinishedPulling="2025-10-09 09:18:11.184669831 +0000 UTC m=+814.674778229" observedRunningTime="2025-10-09 09:18:11.759609437 +0000 UTC m=+815.249717824" watchObservedRunningTime="2025-10-09 09:18:11.762759386 +0000 UTC m=+815.252867783" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.804054 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" podStartSLOduration=3.625031605 podStartE2EDuration="25.804036299s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.985544747 +0000 UTC m=+792.475653145" lastFinishedPulling="2025-10-09 09:18:11.164549442 +0000 UTC m=+814.654657839" observedRunningTime="2025-10-09 09:18:11.797948851 +0000 UTC m=+815.288057248" watchObservedRunningTime="2025-10-09 09:18:11.804036299 +0000 UTC m=+815.294144697" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.812908 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" podStartSLOduration=3.715409977 podStartE2EDuration="25.8128872s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:49.067081878 +0000 UTC m=+792.557190275" lastFinishedPulling="2025-10-09 09:18:11.1645591 +0000 UTC m=+814.654667498" observedRunningTime="2025-10-09 09:18:11.81117981 +0000 UTC m=+815.301288207" watchObservedRunningTime="2025-10-09 09:18:11.8128872 +0000 UTC m=+815.302995597" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.827210 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" podStartSLOduration=3.677831167 podStartE2EDuration="25.827201661s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:49.016111646 +0000 UTC m=+792.506220043" lastFinishedPulling="2025-10-09 09:18:11.165482141 +0000 UTC m=+814.655590537" observedRunningTime="2025-10-09 09:18:11.824573656 +0000 UTC m=+815.314682053" watchObservedRunningTime="2025-10-09 09:18:11.827201661 +0000 UTC m=+815.317310059" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.842952 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" podStartSLOduration=3.649450932 podStartE2EDuration="25.842918629s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.985240092 +0000 UTC m=+792.475348489" lastFinishedPulling="2025-10-09 09:18:11.178707789 +0000 UTC m=+814.668816186" observedRunningTime="2025-10-09 09:18:11.840253312 +0000 UTC m=+815.330361709" watchObservedRunningTime="2025-10-09 09:18:11.842918629 +0000 UTC m=+815.333027025" Oct 09 09:18:11 crc kubenswrapper[4710]: I1009 09:18:11.877404 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" podStartSLOduration=3.833340582 podStartE2EDuration="25.877377896s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:49.135719323 +0000 UTC m=+792.625827720" lastFinishedPulling="2025-10-09 09:18:11.179756637 +0000 UTC m=+814.669865034" observedRunningTime="2025-10-09 09:18:11.869768578 +0000 UTC m=+815.359876975" watchObservedRunningTime="2025-10-09 09:18:11.877377896 +0000 UTC m=+815.367486294" Oct 09 09:18:12 crc 
kubenswrapper[4710]: I1009 09:18:12.717125 4710 generic.go:334] "Generic (PLEG): container finished" podID="ce002013-3278-412a-afb1-9d64ccc284f8" containerID="202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2" exitCode=0 Oct 09 09:18:12 crc kubenswrapper[4710]: I1009 09:18:12.717175 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerDied","Data":"202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2"} Oct 09 09:18:14 crc kubenswrapper[4710]: I1009 09:18:14.734816 4710 generic.go:334] "Generic (PLEG): container finished" podID="ce002013-3278-412a-afb1-9d64ccc284f8" containerID="e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff" exitCode=0 Oct 09 09:18:14 crc kubenswrapper[4710]: I1009 09:18:14.734900 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerDied","Data":"e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff"} Oct 09 09:18:15 crc kubenswrapper[4710]: I1009 09:18:15.745419 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerStarted","Data":"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036"} Oct 09 09:18:15 crc kubenswrapper[4710]: I1009 09:18:15.763764 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lhjhh" podStartSLOduration=5.230375394 podStartE2EDuration="7.763750953s" podCreationTimestamp="2025-10-09 09:18:08 +0000 UTC" firstStartedPulling="2025-10-09 09:18:12.718893119 +0000 UTC m=+816.209001516" lastFinishedPulling="2025-10-09 09:18:15.252268677 +0000 UTC m=+818.742377075" observedRunningTime="2025-10-09 09:18:15.762413512 +0000 UTC m=+819.252521909" watchObservedRunningTime="2025-10-09 09:18:15.763750953 +0000 UTC m=+819.253859351" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.543565 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-658bdf4b74-mvjkw" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.601067 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-85d5d9dd78-qq9g6" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.712375 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-858f76bbdd-qhvrk" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.755797 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" event={"ID":"72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a","Type":"ContainerStarted","Data":"a09d4bc80acd2e07e05e4f7ae7b05b92fe21a3b44a968ddd89daeceeecc328fe"} Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.756185 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.794286 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" podStartSLOduration=3.100865733 
podStartE2EDuration="30.794268779s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.628787909 +0000 UTC m=+792.118896306" lastFinishedPulling="2025-10-09 09:18:16.322190955 +0000 UTC m=+819.812299352" observedRunningTime="2025-10-09 09:18:16.792613938 +0000 UTC m=+820.282722335" watchObservedRunningTime="2025-10-09 09:18:16.794268779 +0000 UTC m=+820.284377176" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.826781 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-9c5c78d49-dhvdq" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.826823 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7ffbcb7588-svtkg" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.844554 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55b6b7c7b8-jb2h2" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.886959 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5f67fbc655-2htjv" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.951517 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-79d585cb66-d7sqz" Oct 09 09:18:16 crc kubenswrapper[4710]: I1009 09:18:16.979126 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5df598886f-sd5xk" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.019395 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-69fdcfc5f5-nmmkl" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.248561 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-68b6c87b68-lghdj" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.279447 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-db6d7f97b-rh8n6" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.344414 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5458f77c4-fzwm9" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.378840 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-67cfc6749b-t69gh" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.387233 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-656bcbd775-m95bh" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.458341 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7f554bff7b-759pl" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.764096 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" event={"ID":"4a08f5f1-bab4-425c-b81c-b48f2d4a186b","Type":"ContainerStarted","Data":"e87d999951a4863a8298ddf31dbfafabe776e37755864d612a4e5712cfbccc13"} 
Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.764261 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:18:17 crc kubenswrapper[4710]: I1009 09:18:17.776667 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" podStartSLOduration=3.260324386 podStartE2EDuration="31.77664214s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.932007505 +0000 UTC m=+792.422115902" lastFinishedPulling="2025-10-09 09:18:17.44832526 +0000 UTC m=+820.938433656" observedRunningTime="2025-10-09 09:18:17.775731513 +0000 UTC m=+821.265839910" watchObservedRunningTime="2025-10-09 09:18:17.77664214 +0000 UTC m=+821.266750537" Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.648740 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj" Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.773871 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" event={"ID":"10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6","Type":"ContainerStarted","Data":"43c160131714fa01ca81339b2516bde576ba59ba69877304353c77f37c04f0a3"} Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.793216 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" podStartSLOduration=2.917020114 podStartE2EDuration="32.793187869s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" firstStartedPulling="2025-10-09 09:17:48.574762336 +0000 UTC m=+792.064870733" lastFinishedPulling="2025-10-09 09:18:18.45093009 +0000 UTC m=+821.941038488" observedRunningTime="2025-10-09 09:18:18.78731238 +0000 UTC m=+822.277420777" watchObservedRunningTime="2025-10-09 09:18:18.793187869 +0000 UTC m=+822.283296266" Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.952167 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.952322 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:18 crc kubenswrapper[4710]: I1009 09:18:18.991063 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:20 crc kubenswrapper[4710]: I1009 09:18:20.791663 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" event={"ID":"26ad6e31-7002-4043-a971-aa507f4118bf","Type":"ContainerStarted","Data":"c95ad8ce44bfbde0bfb28213c69b9cf958de767d7043d38610134984b2d7b6ea"} Oct 09 09:18:20 crc kubenswrapper[4710]: I1009 09:18:20.792658 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:18:20 crc kubenswrapper[4710]: I1009 09:18:20.810457 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" podStartSLOduration=2.215775573 podStartE2EDuration="34.81041883s" podCreationTimestamp="2025-10-09 09:17:46 +0000 UTC" 
firstStartedPulling="2025-10-09 09:17:47.783986179 +0000 UTC m=+791.274094576" lastFinishedPulling="2025-10-09 09:18:20.378629436 +0000 UTC m=+823.868737833" observedRunningTime="2025-10-09 09:18:20.808172183 +0000 UTC m=+824.298280580" watchObservedRunningTime="2025-10-09 09:18:20.81041883 +0000 UTC m=+824.300527226" Oct 09 09:18:20 crc kubenswrapper[4710]: I1009 09:18:20.829709 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:20 crc kubenswrapper[4710]: I1009 09:18:20.873591 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:22 crc kubenswrapper[4710]: I1009 09:18:22.803929 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lhjhh" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="registry-server" containerID="cri-o://7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036" gracePeriod=2 Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.153120 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.224562 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content\") pod \"ce002013-3278-412a-afb1-9d64ccc284f8\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.224652 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities\") pod \"ce002013-3278-412a-afb1-9d64ccc284f8\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.224692 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l59n\" (UniqueName: \"kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n\") pod \"ce002013-3278-412a-afb1-9d64ccc284f8\" (UID: \"ce002013-3278-412a-afb1-9d64ccc284f8\") " Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.225215 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities" (OuterVolumeSpecName: "utilities") pod "ce002013-3278-412a-afb1-9d64ccc284f8" (UID: "ce002013-3278-412a-afb1-9d64ccc284f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.237582 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n" (OuterVolumeSpecName: "kube-api-access-5l59n") pod "ce002013-3278-412a-afb1-9d64ccc284f8" (UID: "ce002013-3278-412a-afb1-9d64ccc284f8"). InnerVolumeSpecName "kube-api-access-5l59n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.269070 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce002013-3278-412a-afb1-9d64ccc284f8" (UID: "ce002013-3278-412a-afb1-9d64ccc284f8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.326574 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l59n\" (UniqueName: \"kubernetes.io/projected/ce002013-3278-412a-afb1-9d64ccc284f8-kube-api-access-5l59n\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.326606 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.326618 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce002013-3278-412a-afb1-9d64ccc284f8-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.814292 4710 generic.go:334] "Generic (PLEG): container finished" podID="ce002013-3278-412a-afb1-9d64ccc284f8" containerID="7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036" exitCode=0 Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.814342 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerDied","Data":"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036"} Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.814370 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhjhh" event={"ID":"ce002013-3278-412a-afb1-9d64ccc284f8","Type":"ContainerDied","Data":"21b9b3c6f0015bd170c691edb0b035881dd32601be12044403335f0251e842fe"} Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.814372 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lhjhh" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.814389 4710 scope.go:117] "RemoveContainer" containerID="7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.835033 4710 scope.go:117] "RemoveContainer" containerID="e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.846396 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.850505 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lhjhh"] Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.865447 4710 scope.go:117] "RemoveContainer" containerID="202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.878102 4710 scope.go:117] "RemoveContainer" containerID="7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036" Oct 09 09:18:23 crc kubenswrapper[4710]: E1009 09:18:23.878503 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036\": container with ID starting with 7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036 not found: ID does not exist" containerID="7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.878666 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036"} err="failed to get container status \"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036\": rpc error: code = NotFound desc = could not find container \"7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036\": container with ID starting with 7511d25663f706f52b3b04adb03f26d1d3465abca831f26351110407fc309036 not found: ID does not exist" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.878764 4710 scope.go:117] "RemoveContainer" containerID="e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff" Oct 09 09:18:23 crc kubenswrapper[4710]: E1009 09:18:23.879249 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff\": container with ID starting with e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff not found: ID does not exist" containerID="e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.879296 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff"} err="failed to get container status \"e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff\": rpc error: code = NotFound desc = could not find container \"e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff\": container with ID starting with e9b48bafc02d97b3a1bc353e0d989944c1c2ef4c1313f9d515ef7968956e11ff not found: ID does not exist" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.879327 4710 scope.go:117] "RemoveContainer" 
containerID="202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2" Oct 09 09:18:23 crc kubenswrapper[4710]: E1009 09:18:23.879767 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2\": container with ID starting with 202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2 not found: ID does not exist" containerID="202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2" Oct 09 09:18:23 crc kubenswrapper[4710]: I1009 09:18:23.879878 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2"} err="failed to get container status \"202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2\": rpc error: code = NotFound desc = could not find container \"202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2\": container with ID starting with 202793c6e2362bb992cacf2331db10c0afb76a90802a7276b385df5a3a241be2 not found: ID does not exist" Oct 09 09:18:24 crc kubenswrapper[4710]: I1009 09:18:24.823418 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" path="/var/lib/kubelet/pods/ce002013-3278-412a-afb1-9d64ccc284f8/volumes" Oct 09 09:18:26 crc kubenswrapper[4710]: I1009 09:18:26.552506 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7b7fb68549-7t5cw" Oct 09 09:18:26 crc kubenswrapper[4710]: I1009 09:18:26.654817 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:18:26 crc kubenswrapper[4710]: I1009 09:18:26.657564 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84b9b84486-4szsc" Oct 09 09:18:26 crc kubenswrapper[4710]: I1009 09:18:26.948596 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-f9fb45f8f-6lbkw" Oct 09 09:18:27 crc kubenswrapper[4710]: I1009 09:18:27.164565 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-79db49b9fb-vnvl6" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.509356 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:29 crc kubenswrapper[4710]: E1009 09:18:29.509995 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="extract-utilities" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.510009 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="extract-utilities" Oct 09 09:18:29 crc kubenswrapper[4710]: E1009 09:18:29.510052 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="registry-server" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.510058 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="registry-server" Oct 09 09:18:29 crc kubenswrapper[4710]: E1009 09:18:29.510071 4710 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="extract-content" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.510076 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="extract-content" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.510209 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce002013-3278-412a-afb1-9d64ccc284f8" containerName="registry-server" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.511179 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.554049 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.621807 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.621879 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rncnc\" (UniqueName: \"kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.621997 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.723887 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.723959 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rncnc\" (UniqueName: \"kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.724004 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.724638 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") 
" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.724767 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.744408 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rncnc\" (UniqueName: \"kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc\") pod \"redhat-operators-t2vs7\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:29 crc kubenswrapper[4710]: I1009 09:18:29.825942 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:30 crc kubenswrapper[4710]: I1009 09:18:30.239118 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:30 crc kubenswrapper[4710]: I1009 09:18:30.870928 4710 generic.go:334] "Generic (PLEG): container finished" podID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerID="99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064" exitCode=0 Oct 09 09:18:30 crc kubenswrapper[4710]: I1009 09:18:30.871030 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerDied","Data":"99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064"} Oct 09 09:18:30 crc kubenswrapper[4710]: I1009 09:18:30.871246 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerStarted","Data":"95dff896a0a3323ffb886a519a859b5ec401a0fb4da909f3062ceadb66fa0e26"} Oct 09 09:18:31 crc kubenswrapper[4710]: I1009 09:18:31.882531 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerStarted","Data":"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a"} Oct 09 09:18:33 crc kubenswrapper[4710]: I1009 09:18:33.896916 4710 generic.go:334] "Generic (PLEG): container finished" podID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerID="08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a" exitCode=0 Oct 09 09:18:33 crc kubenswrapper[4710]: I1009 09:18:33.896984 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerDied","Data":"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a"} Oct 09 09:18:34 crc kubenswrapper[4710]: I1009 09:18:34.905594 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerStarted","Data":"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5"} Oct 09 09:18:34 crc kubenswrapper[4710]: I1009 09:18:34.923567 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t2vs7" podStartSLOduration=2.372427493 
podStartE2EDuration="5.923548472s" podCreationTimestamp="2025-10-09 09:18:29 +0000 UTC" firstStartedPulling="2025-10-09 09:18:30.872945004 +0000 UTC m=+834.363053401" lastFinishedPulling="2025-10-09 09:18:34.424065983 +0000 UTC m=+837.914174380" observedRunningTime="2025-10-09 09:18:34.919339755 +0000 UTC m=+838.409448153" watchObservedRunningTime="2025-10-09 09:18:34.923548472 +0000 UTC m=+838.413656869" Oct 09 09:18:39 crc kubenswrapper[4710]: I1009 09:18:39.827086 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:39 crc kubenswrapper[4710]: I1009 09:18:39.827664 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:39 crc kubenswrapper[4710]: I1009 09:18:39.865418 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:39 crc kubenswrapper[4710]: I1009 09:18:39.979115 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.403390 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.405111 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.409004 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.416165 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.416274 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-pxl4c" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.416544 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.430181 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.455728 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.456997 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.468878 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.476468 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.495777 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.495812 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zd7l\" (UniqueName: \"kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.495914 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsjqz\" (UniqueName: \"kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.495949 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.495994 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.597668 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.597850 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zd7l\" (UniqueName: \"kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.597974 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsjqz\" (UniqueName: \"kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " 
pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.598073 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.598188 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.599556 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.600502 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.600588 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.618131 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsjqz\" (UniqueName: \"kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz\") pod \"dnsmasq-dns-7bfcb9d745-trlqp\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.621363 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zd7l\" (UniqueName: \"kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l\") pod \"dnsmasq-dns-758b79db4c-tvwcb\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.722371 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.775532 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:18:40 crc kubenswrapper[4710]: I1009 09:18:40.998061 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:41 crc kubenswrapper[4710]: I1009 09:18:41.157372 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:18:41 crc kubenswrapper[4710]: W1009 09:18:41.164813 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7324cbbc_2a78_433d_9be4_65579c637a99.slice/crio-028e7879016a3f5258f058ee8f8a404955ae707f1e97324772c06baf4483605c WatchSource:0}: Error finding container 028e7879016a3f5258f058ee8f8a404955ae707f1e97324772c06baf4483605c: Status 404 returned error can't find the container with id 028e7879016a3f5258f058ee8f8a404955ae707f1e97324772c06baf4483605c Oct 09 09:18:41 crc kubenswrapper[4710]: W1009 09:18:41.251373 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb235be2d_aaf9_45a0_8984_dc00f8b4d77e.slice/crio-d951a231b596e0d55ea42ce64c6fd9d96d8a57fb8293d98d814eb5fd16e4bb97 WatchSource:0}: Error finding container d951a231b596e0d55ea42ce64c6fd9d96d8a57fb8293d98d814eb5fd16e4bb97: Status 404 returned error can't find the container with id d951a231b596e0d55ea42ce64c6fd9d96d8a57fb8293d98d814eb5fd16e4bb97 Oct 09 09:18:41 crc kubenswrapper[4710]: I1009 09:18:41.252994 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:18:41 crc kubenswrapper[4710]: I1009 09:18:41.954585 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" event={"ID":"b235be2d-aaf9-45a0-8984-dc00f8b4d77e","Type":"ContainerStarted","Data":"d951a231b596e0d55ea42ce64c6fd9d96d8a57fb8293d98d814eb5fd16e4bb97"} Oct 09 09:18:41 crc kubenswrapper[4710]: I1009 09:18:41.956647 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" event={"ID":"7324cbbc-2a78-433d-9be4-65579c637a99","Type":"ContainerStarted","Data":"028e7879016a3f5258f058ee8f8a404955ae707f1e97324772c06baf4483605c"} Oct 09 09:18:41 crc kubenswrapper[4710]: I1009 09:18:41.957100 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t2vs7" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="registry-server" containerID="cri-o://214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5" gracePeriod=2 Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.372003 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.428840 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content\") pod \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.429071 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rncnc\" (UniqueName: \"kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc\") pod \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.429128 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities\") pod \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\" (UID: \"c94bfc3d-c4f1-40a4-ae33-acb235d1df82\") " Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.433464 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities" (OuterVolumeSpecName: "utilities") pod "c94bfc3d-c4f1-40a4-ae33-acb235d1df82" (UID: "c94bfc3d-c4f1-40a4-ae33-acb235d1df82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.458489 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc" (OuterVolumeSpecName: "kube-api-access-rncnc") pod "c94bfc3d-c4f1-40a4-ae33-acb235d1df82" (UID: "c94bfc3d-c4f1-40a4-ae33-acb235d1df82"). InnerVolumeSpecName "kube-api-access-rncnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.495840 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c94bfc3d-c4f1-40a4-ae33-acb235d1df82" (UID: "c94bfc3d-c4f1-40a4-ae33-acb235d1df82"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.532642 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.532683 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rncnc\" (UniqueName: \"kubernetes.io/projected/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-kube-api-access-rncnc\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.532699 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c94bfc3d-c4f1-40a4-ae33-acb235d1df82-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.967194 4710 generic.go:334] "Generic (PLEG): container finished" podID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerID="214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5" exitCode=0 Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.967284 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t2vs7" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.967302 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerDied","Data":"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5"} Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.967628 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2vs7" event={"ID":"c94bfc3d-c4f1-40a4-ae33-acb235d1df82","Type":"ContainerDied","Data":"95dff896a0a3323ffb886a519a859b5ec401a0fb4da909f3062ceadb66fa0e26"} Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.967654 4710 scope.go:117] "RemoveContainer" containerID="214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5" Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.991367 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:42 crc kubenswrapper[4710]: I1009 09:18:42.996170 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t2vs7"] Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.005450 4710 scope.go:117] "RemoveContainer" containerID="08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.025209 4710 scope.go:117] "RemoveContainer" containerID="99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.086294 4710 scope.go:117] "RemoveContainer" containerID="214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5" Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.087006 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5\": container with ID starting with 214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5 not found: ID does not exist" containerID="214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.087056 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5"} err="failed to get container status \"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5\": rpc error: code = NotFound desc = could not find container \"214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5\": container with ID starting with 214d9f031293a3baf8b50e4039922949659742cf26e41b7e13d4ffe40b4b97f5 not found: ID does not exist" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.087087 4710 scope.go:117] "RemoveContainer" containerID="08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a" Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.087559 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a\": container with ID starting with 08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a not found: ID does not exist" containerID="08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.087591 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a"} err="failed to get container status \"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a\": rpc error: code = NotFound desc = could not find container \"08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a\": container with ID starting with 08b2e37fa70ffbf113ddbb571570ef4318317bc749c16e2b817c4ba0c55ea52a not found: ID does not exist" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.087614 4710 scope.go:117] "RemoveContainer" containerID="99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064" Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.088197 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064\": container with ID starting with 99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064 not found: ID does not exist" containerID="99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.088219 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064"} err="failed to get container status \"99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064\": rpc error: code = NotFound desc = could not find container \"99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064\": container with ID starting with 99eeb8f61dabc51b13dcedbb86a94d969cf955c346f55c6de1d2cac8df708064 not found: ID does not exist" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.813723 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.844622 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.844873 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="extract-utilities" Oct 09 09:18:43 crc 
kubenswrapper[4710]: I1009 09:18:43.844892 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="extract-utilities" Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.844913 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="extract-content" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.844920 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="extract-content" Oct 09 09:18:43 crc kubenswrapper[4710]: E1009 09:18:43.844930 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="registry-server" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.844935 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="registry-server" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.845076 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" containerName="registry-server" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.845721 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.856616 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8pxl\" (UniqueName: \"kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.856686 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.856707 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.858588 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.958044 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8pxl\" (UniqueName: \"kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.959156 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 
09:18:43.959186 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.959997 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.960509 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:43 crc kubenswrapper[4710]: I1009 09:18:43.988486 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8pxl\" (UniqueName: \"kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl\") pod \"dnsmasq-dns-644597f84c-ldfb2\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.120043 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.139857 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.140897 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.156871 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.164752 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.166679 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.166714 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg6ft\" (UniqueName: \"kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.166818 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.268074 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.268113 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg6ft\" (UniqueName: \"kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.268200 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.269059 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.269105 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.290198 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg6ft\" (UniqueName: \"kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft\") pod \"dnsmasq-dns-77597f887-gzssh\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 
crc kubenswrapper[4710]: I1009 09:18:44.464645 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.825423 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c94bfc3d-c4f1-40a4-ae33-acb235d1df82" path="/var/lib/kubelet/pods/c94bfc3d-c4f1-40a4-ae33-acb235d1df82/volumes" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.826683 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:18:44 crc kubenswrapper[4710]: W1009 09:18:44.828620 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38070298_7793_4fda_adaf_471d64d2b023.slice/crio-7060b54fb486f01017a7a3e2cf2f09cc7c7d094f9725f993f2ddcbae94b14adb WatchSource:0}: Error finding container 7060b54fb486f01017a7a3e2cf2f09cc7c7d094f9725f993f2ddcbae94b14adb: Status 404 returned error can't find the container with id 7060b54fb486f01017a7a3e2cf2f09cc7c7d094f9725f993f2ddcbae94b14adb Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.906477 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.957169 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.958255 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964193 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-7c4c4" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964534 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964642 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964725 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964799 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.964858 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.965551 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.969739 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990220 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990295 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990332 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990366 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990387 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990405 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990451 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990476 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990523 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990557 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7q7c\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:44 crc kubenswrapper[4710]: I1009 09:18:44.990578 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins\") pod 
\"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.028613 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" event={"ID":"38070298-7793-4fda-adaf-471d64d2b023","Type":"ContainerStarted","Data":"7060b54fb486f01017a7a3e2cf2f09cc7c7d094f9725f993f2ddcbae94b14adb"} Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.031797 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-gzssh" event={"ID":"c4ddf393-082f-4ea4-9644-c6e03236fc02","Type":"ContainerStarted","Data":"f31f5c8de109834ad3615e36ded053a67b812a82b035ebd9c2413efec2e87034"} Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.091424 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.091507 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.091560 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.091903 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.092660 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093553 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7q7c\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093601 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093687 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093780 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093840 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093890 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093922 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.093943 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.094910 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.095378 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.095682 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.096706 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.100560 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.101064 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.101205 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.101683 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.111545 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7q7c\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.121307 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.286662 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.291529 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.292381 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.301486 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.301897 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.302322 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.302339 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.302369 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.302685 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.302850 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-rcfnn" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.316863 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.398697 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399281 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399308 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9drnb\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399343 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399476 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399537 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399567 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399608 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.399756 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.400002 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.400031 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.502981 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9drnb\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503068 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503130 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503200 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503292 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503318 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503374 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.503516 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.504664 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.504779 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.504833 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.504871 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.505125 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.505182 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.505262 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.505965 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.506777 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.514529 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.519298 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.526277 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9drnb\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.531373 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.539083 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.545639 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.671483 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:18:45 crc kubenswrapper[4710]: I1009 09:18:45.740731 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:18:46 crc kubenswrapper[4710]: I1009 09:18:46.152780 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.767277 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.768689 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.777542 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.778547 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.778739 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.779088 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-cgfkj" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.779247 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.781151 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.794794 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.903317 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.904984 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.906881 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.907189 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.907333 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-n5f69" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.907535 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.907652 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953713 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953755 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953795 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953847 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953884 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hrgt\" (UniqueName: \"kubernetes.io/projected/bc62be28-ccca-4b4b-b7d0-afabccec4047-kube-api-access-6hrgt\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.953985 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-default\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.954025 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-secrets\") pod 
\"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.954054 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:47 crc kubenswrapper[4710]: I1009 09:18:47.954095 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-kolla-config\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057458 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057546 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hrgt\" (UniqueName: \"kubernetes.io/projected/bc62be28-ccca-4b4b-b7d0-afabccec4047-kube-api-access-6hrgt\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057617 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057650 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57584eef-cc17-41b8-a307-663a50cbf568-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057709 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057742 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-default\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057787 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-secrets\") pod \"openstack-cell1-galera-0\" (UID: 
\"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057818 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-secrets\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057862 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057885 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057904 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057948 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.057975 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-kolla-config\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.058017 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.058037 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.058065 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.058126 4710 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcgt2\" (UniqueName: \"kubernetes.io/projected/57584eef-cc17-41b8-a307-663a50cbf568-kube-api-access-kcgt2\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.058149 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.059014 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-kolla-config\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.059105 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-generated\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.059592 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.059965 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-operator-scripts\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.060197 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/bc62be28-ccca-4b4b-b7d0-afabccec4047-config-data-default\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.074621 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-secrets\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.074752 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.082545 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: 
\"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.084139 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hrgt\" (UniqueName: \"kubernetes.io/projected/bc62be28-ccca-4b4b-b7d0-afabccec4047-kube-api-access-6hrgt\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.084884 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc62be28-ccca-4b4b-b7d0-afabccec4047-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"bc62be28-ccca-4b4b-b7d0-afabccec4047\") " pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.104128 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161089 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161154 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57584eef-cc17-41b8-a307-663a50cbf568-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161190 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161243 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161274 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161308 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161330 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" 
(UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161382 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcgt2\" (UniqueName: \"kubernetes.io/projected/57584eef-cc17-41b8-a307-663a50cbf568-kube-api-access-kcgt2\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161402 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.161919 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.166165 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.176514 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57584eef-cc17-41b8-a307-663a50cbf568-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.176683 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.176851 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.177146 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57584eef-cc17-41b8-a307-663a50cbf568-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.177191 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc 
kubenswrapper[4710]: I1009 09:18:48.186239 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57584eef-cc17-41b8-a307-663a50cbf568-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.192225 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcgt2\" (UniqueName: \"kubernetes.io/projected/57584eef-cc17-41b8-a307-663a50cbf568-kube-api-access-kcgt2\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.199269 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57584eef-cc17-41b8-a307-663a50cbf568\") " pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.224600 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.342853 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.344876 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.358920 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.359286 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.359491 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.360117 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-ltxt8" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.465750 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk784\" (UniqueName: \"kubernetes.io/projected/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kube-api-access-jk784\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.465839 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kolla-config\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.465970 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-config-data\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.465993 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.466020 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.567854 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk784\" (UniqueName: \"kubernetes.io/projected/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kube-api-access-jk784\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.568074 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kolla-config\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.568196 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-config-data\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.568267 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.568374 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.568927 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-config-data\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.570201 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kolla-config\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.572511 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.573609 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/163b8a88-dc31-4540-a39b-bfecc81ce8aa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.580983 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk784\" (UniqueName: \"kubernetes.io/projected/163b8a88-dc31-4540-a39b-bfecc81ce8aa-kube-api-access-jk784\") pod \"memcached-0\" (UID: \"163b8a88-dc31-4540-a39b-bfecc81ce8aa\") " pod="openstack/memcached-0" Oct 09 09:18:48 crc kubenswrapper[4710]: I1009 09:18:48.669785 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.109861 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.110971 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.117347 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-8mwgh" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.129922 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.207362 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vkk7\" (UniqueName: \"kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7\") pod \"kube-state-metrics-0\" (UID: \"dd02e6a1-01c6-402c-bebb-b34fde77ce36\") " pod="openstack/kube-state-metrics-0" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.308359 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vkk7\" (UniqueName: \"kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7\") pod \"kube-state-metrics-0\" (UID: \"dd02e6a1-01c6-402c-bebb-b34fde77ce36\") " pod="openstack/kube-state-metrics-0" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.333689 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vkk7\" (UniqueName: \"kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7\") pod \"kube-state-metrics-0\" (UID: \"dd02e6a1-01c6-402c-bebb-b34fde77ce36\") " pod="openstack/kube-state-metrics-0" Oct 09 09:18:50 crc kubenswrapper[4710]: I1009 09:18:50.445307 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.013846 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.161628 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerStarted","Data":"ce2bf4562cffe13ab87edabf98f76f15367dbb304554de50a85be3ec511ca4ac"} Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.164408 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerStarted","Data":"b7c42f8f7ebb4835cdebee452c000190ddb250946e11a4bd5a49a3eeed5d5780"} Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.293836 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ff4n7"] Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.295139 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.297525 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.297728 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-sw6f8" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.299291 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.301331 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-b2pbr"] Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.302951 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.311091 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7"] Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.331852 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-b2pbr"] Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401099 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-ovn-controller-tls-certs\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401164 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-run\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401188 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-log-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401338 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401376 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-combined-ca-bundle\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401469 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-etc-ovs\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401491 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26mxd\" (UniqueName: \"kubernetes.io/projected/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-kube-api-access-26mxd\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401528 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j794n\" (UniqueName: \"kubernetes.io/projected/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-kube-api-access-j794n\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" 
Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401560 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401576 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-scripts\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401658 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-log\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401692 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-scripts\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.401758 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-lib\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.502962 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-ovn-controller-tls-certs\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503028 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-run\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503047 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-log-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503088 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503114 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-combined-ca-bundle\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503158 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-etc-ovs\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503181 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26mxd\" (UniqueName: \"kubernetes.io/projected/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-kube-api-access-26mxd\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503213 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j794n\" (UniqueName: \"kubernetes.io/projected/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-kube-api-access-j794n\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503260 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503280 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-scripts\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503300 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-log\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503333 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-scripts\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503348 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-lib\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.503904 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-lib\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " 
pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.505138 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-run\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.505311 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-log-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.505460 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run-ovn\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.509205 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-etc-ovs\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.509207 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-var-log\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.511117 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-scripts\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.511199 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-var-run\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.511281 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-combined-ca-bundle\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.512709 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-scripts\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.516955 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-ovn-controller-tls-certs\") pod 
\"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.524420 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26mxd\" (UniqueName: \"kubernetes.io/projected/8c4f1833-b6b6-4c51-bd5e-0b4cf749e848-kube-api-access-26mxd\") pod \"ovn-controller-ff4n7\" (UID: \"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848\") " pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.529145 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j794n\" (UniqueName: \"kubernetes.io/projected/72df4789-6551-4a9f-a8ba-6ed6f43f03f9-kube-api-access-j794n\") pod \"ovn-controller-ovs-b2pbr\" (UID: \"72df4789-6551-4a9f-a8ba-6ed6f43f03f9\") " pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.616614 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.622841 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.966271 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.967583 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.971999 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.972247 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.972579 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.972715 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-k7xc8" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.973862 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 09 09:18:54 crc kubenswrapper[4710]: I1009 09:18:54.991586 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.010957 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011097 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-config\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011163 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsdn6\" (UniqueName: 
\"kubernetes.io/projected/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-kube-api-access-dsdn6\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011189 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011236 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011258 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011283 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.011347 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112386 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsdn6\" (UniqueName: \"kubernetes.io/projected/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-kube-api-access-dsdn6\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112451 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112513 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " 
pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112574 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112750 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112772 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112781 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.112897 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-config\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.113859 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.114420 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-config\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.116957 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.120614 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.123407 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-metrics-certs-tls-certs\") pod 
\"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.127616 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.130007 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsdn6\" (UniqueName: \"kubernetes.io/projected/c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f-kube-api-access-dsdn6\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.142245 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f\") " pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:55 crc kubenswrapper[4710]: I1009 09:18:55.284670 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.954078 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.956040 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.958879 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.958978 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.959069 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-5pwcr" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.960557 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 09 09:18:56 crc kubenswrapper[4710]: I1009 09:18:56.966091 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.052859 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6fdz\" (UniqueName: \"kubernetes.io/projected/958e4ef0-c38c-411d-9893-bac75789df76-kube-api-access-f6fdz\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.052978 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.053000 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.053028 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-config\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.053064 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.053081 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/958e4ef0-c38c-411d-9893-bac75789df76-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.053499 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.054286 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156346 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156408 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156456 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-config\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156480 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156494 
4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/958e4ef0-c38c-411d-9893-bac75789df76-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156520 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156581 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.156643 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6fdz\" (UniqueName: \"kubernetes.io/projected/958e4ef0-c38c-411d-9893-bac75789df76-kube-api-access-f6fdz\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.157449 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/958e4ef0-c38c-411d-9893-bac75789df76-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.157648 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.157787 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.158330 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/958e4ef0-c38c-411d-9893-bac75789df76-config\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.160588 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.161648 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " 
pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.169988 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/958e4ef0-c38c-411d-9893-bac75789df76-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.181108 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6fdz\" (UniqueName: \"kubernetes.io/projected/958e4ef0-c38c-411d-9893-bac75789df76-kube-api-access-f6fdz\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.194503 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"958e4ef0-c38c-411d-9893-bac75789df76\") " pod="openstack/ovsdbserver-nb-0" Oct 09 09:18:57 crc kubenswrapper[4710]: I1009 09:18:57.279879 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 09 09:19:04 crc kubenswrapper[4710]: E1009 09:19:04.273343 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 09 09:19:04 crc kubenswrapper[4710]: E1009 09:19:04.274682 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cg6ft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-77597f887-gzssh_openstack(c4ddf393-082f-4ea4-9644-c6e03236fc02): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:19:04 crc kubenswrapper[4710]: E1009 09:19:04.275989 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-77597f887-gzssh" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.258177 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df\\\"\"" pod="openstack/dnsmasq-dns-77597f887-gzssh" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.368714 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.368896 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed 
--no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h8pxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-644597f84c-ldfb2_openstack(38070298-7793-4fda-adaf-471d64d2b023): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.370226 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" podUID="38070298-7793-4fda-adaf-471d64d2b023" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.440475 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.440701 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5zd7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-758b79db4c-tvwcb_openstack(b235be2d-aaf9-45a0-8984-dc00f8b4d77e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.441823 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" podUID="b235be2d-aaf9-45a0-8984-dc00f8b4d77e" Oct 09 09:19:05 crc kubenswrapper[4710]: I1009 09:19:05.545418 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:19:05 crc kubenswrapper[4710]: I1009 09:19:05.545708 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.571642 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.571805 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vsjqz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7bfcb9d745-trlqp_openstack(7324cbbc-2a78-433d-9be4-65579c637a99): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:19:05 crc kubenswrapper[4710]: E1009 09:19:05.574375 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" podUID="7324cbbc-2a78-433d-9be4-65579c637a99" Oct 09 09:19:05 crc kubenswrapper[4710]: I1009 09:19:05.826388 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 09 09:19:05 crc kubenswrapper[4710]: W1009 09:19:05.838474 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod163b8a88_dc31_4540_a39b_bfecc81ce8aa.slice/crio-ee5fcd342f8a65fd396fbee1581bf2e59aa44c2f385d1c586cb97d4b1db4f724 WatchSource:0}: Error finding container ee5fcd342f8a65fd396fbee1581bf2e59aa44c2f385d1c586cb97d4b1db4f724: Status 404 returned error can't find the container with id ee5fcd342f8a65fd396fbee1581bf2e59aa44c2f385d1c586cb97d4b1db4f724 Oct 09 09:19:05 crc kubenswrapper[4710]: I1009 09:19:05.978315 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.038845 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 09 09:19:06 crc 
kubenswrapper[4710]: I1009 09:19:06.178480 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7"] Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.192020 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.262693 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd02e6a1-01c6-402c-bebb-b34fde77ce36","Type":"ContainerStarted","Data":"33acd33ce3caee2780d890ade8f95805599056134e45d99173e03bfb9203acff"} Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.263783 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"163b8a88-dc31-4540-a39b-bfecc81ce8aa","Type":"ContainerStarted","Data":"ee5fcd342f8a65fd396fbee1581bf2e59aa44c2f385d1c586cb97d4b1db4f724"} Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.264661 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bc62be28-ccca-4b4b-b7d0-afabccec4047","Type":"ContainerStarted","Data":"f8161b0d6ba75427b343b34914ba03bf205ad1134ad7f0ce3dc8d4115f769b4c"} Oct 09 09:19:06 crc kubenswrapper[4710]: E1009 09:19:06.266536 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:c4e71b2158fd939dad8b8e705273493051d3023273d23b279f2699dce6db33df\\\"\"" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" podUID="38070298-7793-4fda-adaf-471d64d2b023" Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.317250 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-b2pbr"] Oct 09 09:19:06 crc kubenswrapper[4710]: W1009 09:19:06.385800 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c4f1833_b6b6_4c51_bd5e_0b4cf749e848.slice/crio-67d5aa1497c0988b42bc7372595fa3dc085caf31102738c02117307ea6a59a35 WatchSource:0}: Error finding container 67d5aa1497c0988b42bc7372595fa3dc085caf31102738c02117307ea6a59a35: Status 404 returned error can't find the container with id 67d5aa1497c0988b42bc7372595fa3dc085caf31102738c02117307ea6a59a35 Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.426558 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 09 09:19:06 crc kubenswrapper[4710]: I1009 09:19:06.970573 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.037724 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.085800 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.086473 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config\") pod \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.086538 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc\") pod \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.086634 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zd7l\" (UniqueName: \"kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l\") pod \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\" (UID: \"b235be2d-aaf9-45a0-8984-dc00f8b4d77e\") " Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.087140 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config" (OuterVolumeSpecName: "config") pod "b235be2d-aaf9-45a0-8984-dc00f8b4d77e" (UID: "b235be2d-aaf9-45a0-8984-dc00f8b4d77e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.087556 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b235be2d-aaf9-45a0-8984-dc00f8b4d77e" (UID: "b235be2d-aaf9-45a0-8984-dc00f8b4d77e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.092180 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l" (OuterVolumeSpecName: "kube-api-access-5zd7l") pod "b235be2d-aaf9-45a0-8984-dc00f8b4d77e" (UID: "b235be2d-aaf9-45a0-8984-dc00f8b4d77e"). InnerVolumeSpecName "kube-api-access-5zd7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.187890 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config\") pod \"7324cbbc-2a78-433d-9be4-65579c637a99\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.187936 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsjqz\" (UniqueName: \"kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz\") pod \"7324cbbc-2a78-433d-9be4-65579c637a99\" (UID: \"7324cbbc-2a78-433d-9be4-65579c637a99\") " Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.188476 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config" (OuterVolumeSpecName: "config") pod "7324cbbc-2a78-433d-9be4-65579c637a99" (UID: "7324cbbc-2a78-433d-9be4-65579c637a99"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.188592 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7324cbbc-2a78-433d-9be4-65579c637a99-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.188613 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zd7l\" (UniqueName: \"kubernetes.io/projected/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-kube-api-access-5zd7l\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.188625 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.188634 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b235be2d-aaf9-45a0-8984-dc00f8b4d77e-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.190963 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz" (OuterVolumeSpecName: "kube-api-access-vsjqz") pod "7324cbbc-2a78-433d-9be4-65579c637a99" (UID: "7324cbbc-2a78-433d-9be4-65579c637a99"). InnerVolumeSpecName "kube-api-access-vsjqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.281055 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.281052 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bfcb9d745-trlqp" event={"ID":"7324cbbc-2a78-433d-9be4-65579c637a99","Type":"ContainerDied","Data":"028e7879016a3f5258f058ee8f8a404955ae707f1e97324772c06baf4483605c"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.282756 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerStarted","Data":"2f54f5b6b2db40672622d4587eeace11474707658f13f2878be4600122b0ee55"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.287118 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"57584eef-cc17-41b8-a307-663a50cbf568","Type":"ContainerStarted","Data":"64d54ba4f38cd827162edf700a71873ce333f4617f67a5253c265a25c2a8a415"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.289753 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerStarted","Data":"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.289804 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsjqz\" (UniqueName: \"kubernetes.io/projected/7324cbbc-2a78-433d-9be4-65579c637a99-kube-api-access-vsjqz\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.291377 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f","Type":"ContainerStarted","Data":"fefd242e7fc1c0213e5357ee7da0708bf7d66f1121bdfa91a920b9dbb6fb4d56"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.293042 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" event={"ID":"b235be2d-aaf9-45a0-8984-dc00f8b4d77e","Type":"ContainerDied","Data":"d951a231b596e0d55ea42ce64c6fd9d96d8a57fb8293d98d814eb5fd16e4bb97"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.293096 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-758b79db4c-tvwcb" Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.295514 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"958e4ef0-c38c-411d-9893-bac75789df76","Type":"ContainerStarted","Data":"d63c17dc154a2755e41ba1e82b1f4828433277f60d908116a89e4cc84e27ea2d"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.296864 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7" event={"ID":"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848","Type":"ContainerStarted","Data":"67d5aa1497c0988b42bc7372595fa3dc085caf31102738c02117307ea6a59a35"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.298391 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-b2pbr" event={"ID":"72df4789-6551-4a9f-a8ba-6ed6f43f03f9","Type":"ContainerStarted","Data":"4ad23568f62abb38dad5ae00c88ed4a8ee0ea8709ba84750616e7973ed064361"} Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.376907 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.385777 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bfcb9d745-trlqp"] Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.401650 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:19:07 crc kubenswrapper[4710]: I1009 09:19:07.406414 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-758b79db4c-tvwcb"] Oct 09 09:19:08 crc kubenswrapper[4710]: I1009 09:19:08.830584 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7324cbbc-2a78-433d-9be4-65579c637a99" path="/var/lib/kubelet/pods/7324cbbc-2a78-433d-9be4-65579c637a99/volumes" Oct 09 09:19:08 crc kubenswrapper[4710]: I1009 09:19:08.831399 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b235be2d-aaf9-45a0-8984-dc00f8b4d77e" path="/var/lib/kubelet/pods/b235be2d-aaf9-45a0-8984-dc00f8b4d77e/volumes" Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.373889 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"57584eef-cc17-41b8-a307-663a50cbf568","Type":"ContainerStarted","Data":"13e858d4a1b1d7c10842903fa14abd6a15da58597fc14ca3cc1d36f448341b6a"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.378495 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd02e6a1-01c6-402c-bebb-b34fde77ce36","Type":"ContainerStarted","Data":"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.378657 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 09 09:19:14 crc 
kubenswrapper[4710]: I1009 09:19:14.381479 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"163b8a88-dc31-4540-a39b-bfecc81ce8aa","Type":"ContainerStarted","Data":"641ec8f93b261490e5cf9ec5d481fa8cf067ab83eeb009b47ca1c167d417db59"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.381601 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.385091 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bc62be28-ccca-4b4b-b7d0-afabccec4047","Type":"ContainerStarted","Data":"343f91df12e774783d84bfed87210419597c2bd3c20cd73b45babc30c225704e"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.387164 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f","Type":"ContainerStarted","Data":"8e5bb1425f3f519a8dc0cc7b16197c7b13ee6aa947305de8838e5d1fd21e5b9e"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.394136 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"958e4ef0-c38c-411d-9893-bac75789df76","Type":"ContainerStarted","Data":"83fcad618b536ea15e8eec1a734cae667e418c4b8bc53502e2bf118bdae17b09"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.397529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7" event={"ID":"8c4f1833-b6b6-4c51-bd5e-0b4cf749e848","Type":"ContainerStarted","Data":"40876ae7f1defc765852a99e3723a293c4615a9d765f2ac6902e19c18ffe8fd0"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.397579 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ff4n7" Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.398901 4710 generic.go:334] "Generic (PLEG): container finished" podID="72df4789-6551-4a9f-a8ba-6ed6f43f03f9" containerID="8d342168d89b4b42c4f02670ec9624516341f16bcd2a068128241ca2f522b2b3" exitCode=0 Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.398937 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-b2pbr" event={"ID":"72df4789-6551-4a9f-a8ba-6ed6f43f03f9","Type":"ContainerDied","Data":"8d342168d89b4b42c4f02670ec9624516341f16bcd2a068128241ca2f522b2b3"} Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.414857 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=20.525977775 podStartE2EDuration="26.414841823s" podCreationTimestamp="2025-10-09 09:18:48 +0000 UTC" firstStartedPulling="2025-10-09 09:19:05.843172694 +0000 UTC m=+869.333281091" lastFinishedPulling="2025-10-09 09:19:11.732036742 +0000 UTC m=+875.222145139" observedRunningTime="2025-10-09 09:19:14.408993947 +0000 UTC m=+877.899102345" watchObservedRunningTime="2025-10-09 09:19:14.414841823 +0000 UTC m=+877.904950221" Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.426325 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=16.748924507 podStartE2EDuration="24.426313024s" podCreationTimestamp="2025-10-09 09:18:50 +0000 UTC" firstStartedPulling="2025-10-09 09:19:05.985171077 +0000 UTC m=+869.475279474" lastFinishedPulling="2025-10-09 09:19:13.662559593 +0000 UTC m=+877.152667991" observedRunningTime="2025-10-09 09:19:14.425606502 +0000 UTC m=+877.915714919" 
watchObservedRunningTime="2025-10-09 09:19:14.426313024 +0000 UTC m=+877.916421421" Oct 09 09:19:14 crc kubenswrapper[4710]: I1009 09:19:14.482890 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ff4n7" podStartSLOduration=13.204241408 podStartE2EDuration="20.482869938s" podCreationTimestamp="2025-10-09 09:18:54 +0000 UTC" firstStartedPulling="2025-10-09 09:19:06.387881151 +0000 UTC m=+869.877989548" lastFinishedPulling="2025-10-09 09:19:13.666509681 +0000 UTC m=+877.156618078" observedRunningTime="2025-10-09 09:19:14.462377017 +0000 UTC m=+877.952485414" watchObservedRunningTime="2025-10-09 09:19:14.482869938 +0000 UTC m=+877.972978335" Oct 09 09:19:15 crc kubenswrapper[4710]: I1009 09:19:15.414008 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-b2pbr" event={"ID":"72df4789-6551-4a9f-a8ba-6ed6f43f03f9","Type":"ContainerStarted","Data":"7bf0136ab8ccad0dfbeeec6e64e32c233c640e6febe5cc35fe1884058b3dbbe1"} Oct 09 09:19:15 crc kubenswrapper[4710]: I1009 09:19:15.414655 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-b2pbr" event={"ID":"72df4789-6551-4a9f-a8ba-6ed6f43f03f9","Type":"ContainerStarted","Data":"0310405b937b2dab6a792b5c4a9d4b8c40ae3690747d0b9966f67bf141e25ad4"} Oct 09 09:19:16 crc kubenswrapper[4710]: I1009 09:19:16.439772 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:19:16 crc kubenswrapper[4710]: I1009 09:19:16.440229 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:19:16 crc kubenswrapper[4710]: I1009 09:19:16.838450 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-b2pbr" podStartSLOduration=15.82366065 podStartE2EDuration="22.83841144s" podCreationTimestamp="2025-10-09 09:18:54 +0000 UTC" firstStartedPulling="2025-10-09 09:19:06.647825244 +0000 UTC m=+870.137933641" lastFinishedPulling="2025-10-09 09:19:13.662576034 +0000 UTC m=+877.152684431" observedRunningTime="2025-10-09 09:19:15.433902634 +0000 UTC m=+878.924011031" watchObservedRunningTime="2025-10-09 09:19:16.83841144 +0000 UTC m=+880.328519837" Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.452394 4710 generic.go:334] "Generic (PLEG): container finished" podID="57584eef-cc17-41b8-a307-663a50cbf568" containerID="13e858d4a1b1d7c10842903fa14abd6a15da58597fc14ca3cc1d36f448341b6a" exitCode=0 Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.452494 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"57584eef-cc17-41b8-a307-663a50cbf568","Type":"ContainerDied","Data":"13e858d4a1b1d7c10842903fa14abd6a15da58597fc14ca3cc1d36f448341b6a"} Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.459370 4710 generic.go:334] "Generic (PLEG): container finished" podID="bc62be28-ccca-4b4b-b7d0-afabccec4047" containerID="343f91df12e774783d84bfed87210419597c2bd3c20cd73b45babc30c225704e" exitCode=0 Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.459422 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bc62be28-ccca-4b4b-b7d0-afabccec4047","Type":"ContainerDied","Data":"343f91df12e774783d84bfed87210419597c2bd3c20cd73b45babc30c225704e"} Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.463054 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-sb-0" event={"ID":"c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f","Type":"ContainerStarted","Data":"783b5b3d9e93a86a980114acee47fffd4b8e3920f6f20f4b39274a01cdab1b0e"} Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.467582 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"958e4ef0-c38c-411d-9893-bac75789df76","Type":"ContainerStarted","Data":"9c425a38c511633bad945c17f97385b5f1066e81da31c986104e03d16cc5d943"} Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.505202 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=14.352161967 podStartE2EDuration="24.505181687s" podCreationTimestamp="2025-10-09 09:18:53 +0000 UTC" firstStartedPulling="2025-10-09 09:19:07.073763817 +0000 UTC m=+870.563872215" lastFinishedPulling="2025-10-09 09:19:17.226783538 +0000 UTC m=+880.716891935" observedRunningTime="2025-10-09 09:19:17.503984731 +0000 UTC m=+880.994093128" watchObservedRunningTime="2025-10-09 09:19:17.505181687 +0000 UTC m=+880.995290084" Oct 09 09:19:17 crc kubenswrapper[4710]: I1009 09:19:17.524597 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=11.944049386 podStartE2EDuration="22.524553385s" podCreationTimestamp="2025-10-09 09:18:55 +0000 UTC" firstStartedPulling="2025-10-09 09:19:06.647633752 +0000 UTC m=+870.137742150" lastFinishedPulling="2025-10-09 09:19:17.228137761 +0000 UTC m=+880.718246149" observedRunningTime="2025-10-09 09:19:17.522896721 +0000 UTC m=+881.013005119" watchObservedRunningTime="2025-10-09 09:19:17.524553385 +0000 UTC m=+881.014661772" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.280262 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.318162 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.476681 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"57584eef-cc17-41b8-a307-663a50cbf568","Type":"ContainerStarted","Data":"e87eb14b06bf0f4482d5521fdcc5ad928da1d88ecd1961d1f5188e1cf509daca"} Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.480136 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"bc62be28-ccca-4b4b-b7d0-afabccec4047","Type":"ContainerStarted","Data":"34538e80764622351991f4723b685ce8c39086b26786f4ea1fe0bf535260e299"} Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.480658 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.504530 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=25.340412081 podStartE2EDuration="32.504502828s" podCreationTimestamp="2025-10-09 09:18:46 +0000 UTC" firstStartedPulling="2025-10-09 09:19:06.523738294 +0000 UTC m=+870.013846692" lastFinishedPulling="2025-10-09 09:19:13.687829042 +0000 UTC m=+877.177937439" observedRunningTime="2025-10-09 09:19:18.495946953 +0000 UTC m=+881.986055351" watchObservedRunningTime="2025-10-09 09:19:18.504502828 +0000 UTC m=+881.994611224" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.518000 4710 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=24.856204735 podStartE2EDuration="32.517975752s" podCreationTimestamp="2025-10-09 09:18:46 +0000 UTC" firstStartedPulling="2025-10-09 09:19:06.042101756 +0000 UTC m=+869.532210154" lastFinishedPulling="2025-10-09 09:19:13.703872774 +0000 UTC m=+877.193981171" observedRunningTime="2025-10-09 09:19:18.514753365 +0000 UTC m=+882.004861763" watchObservedRunningTime="2025-10-09 09:19:18.517975752 +0000 UTC m=+882.008084138" Oct 09 09:19:18 crc kubenswrapper[4710]: I1009 09:19:18.673199 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.285683 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.322290 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.488998 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.519807 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.521592 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.772204 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.832597 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.835614 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.838859 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.845912 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.886233 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-vcl7d"] Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.887301 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.891229 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.914563 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vcl7d"] Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.961774 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.961813 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.961913 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:19 crc kubenswrapper[4710]: I1009 09:19:19.962060 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnwck\" (UniqueName: \"kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.013704 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.036856 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.038256 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.047110 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067635 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c1f1ba-8a7d-482b-841f-591355f9dd44-config\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067695 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067715 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067758 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovn-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067789 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067816 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-combined-ca-bundle\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067834 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovs-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067886 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067914 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-9t7mg\" (UniqueName: \"kubernetes.io/projected/50c1f1ba-8a7d-482b-841f-591355f9dd44-kube-api-access-9t7mg\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.067948 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnwck\" (UniqueName: \"kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.069064 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.071764 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.071852 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.072012 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.113583 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnwck\" (UniqueName: \"kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck\") pod \"dnsmasq-dns-6d8fd57975-2s7qz\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.147321 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.148698 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.156713 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.156985 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.157170 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.157302 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-scxxt" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169631 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169702 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169742 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-combined-ca-bundle\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169766 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovs-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169791 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169842 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9t7mg\" (UniqueName: \"kubernetes.io/projected/50c1f1ba-8a7d-482b-841f-591355f9dd44-kube-api-access-9t7mg\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169897 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.169903 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc\") pod 
\"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.170017 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c1f1ba-8a7d-482b-841f-591355f9dd44-config\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.170154 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.170194 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.170309 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovn-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.170942 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c1f1ba-8a7d-482b-841f-591355f9dd44-config\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.171057 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovs-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.171077 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/50c1f1ba-8a7d-482b-841f-591355f9dd44-ovn-rundir\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.174673 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.175686 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.188888 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50c1f1ba-8a7d-482b-841f-591355f9dd44-combined-ca-bundle\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.189235 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t7mg\" (UniqueName: \"kubernetes.io/projected/50c1f1ba-8a7d-482b-841f-591355f9dd44-kube-api-access-9t7mg\") pod \"ovn-controller-metrics-vcl7d\" (UID: \"50c1f1ba-8a7d-482b-841f-591355f9dd44\") " pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.207785 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-vcl7d" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.272779 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-scripts\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.272828 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-config\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.272871 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.272927 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.272996 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n99k2\" (UniqueName: \"kubernetes.io/projected/4b6b2be0-00ba-434b-9310-32ee9f286c71-kube-api-access-n99k2\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273074 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273138 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273158 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273186 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273225 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273266 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.273310 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.274553 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.275188 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.276388 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " 
pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.280751 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.304708 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq\") pod \"dnsmasq-dns-bc45f6dcf-8l6f9\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.320305 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375301 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375698 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-scripts\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375722 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-config\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375813 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n99k2\" (UniqueName: \"kubernetes.io/projected/4b6b2be0-00ba-434b-9310-32ee9f286c71-kube-api-access-n99k2\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375890 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375906 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.375934 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: 
I1009 09:19:20.376803 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-scripts\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.378733 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.378779 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.379242 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b6b2be0-00ba-434b-9310-32ee9f286c71-config\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.381277 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.381339 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.382939 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b2be0-00ba-434b-9310-32ee9f286c71-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.398018 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n99k2\" (UniqueName: \"kubernetes.io/projected/4b6b2be0-00ba-434b-9310-32ee9f286c71-kube-api-access-n99k2\") pod \"ovn-northd-0\" (UID: \"4b6b2be0-00ba-434b-9310-32ee9f286c71\") " pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.455105 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.479768 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8pxl\" (UniqueName: \"kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl\") pod \"38070298-7793-4fda-adaf-471d64d2b023\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.480793 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config\") pod \"38070298-7793-4fda-adaf-471d64d2b023\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.481872 4710 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc\") pod \"38070298-7793-4fda-adaf-471d64d2b023\" (UID: \"38070298-7793-4fda-adaf-471d64d2b023\") " Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.483693 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config" (OuterVolumeSpecName: "config") pod "38070298-7793-4fda-adaf-471d64d2b023" (UID: "38070298-7793-4fda-adaf-471d64d2b023"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.484153 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38070298-7793-4fda-adaf-471d64d2b023" (UID: "38070298-7793-4fda-adaf-471d64d2b023"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.500306 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl" (OuterVolumeSpecName: "kube-api-access-h8pxl") pod "38070298-7793-4fda-adaf-471d64d2b023" (UID: "38070298-7793-4fda-adaf-471d64d2b023"). InnerVolumeSpecName "kube-api-access-h8pxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.536029 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" event={"ID":"38070298-7793-4fda-adaf-471d64d2b023","Type":"ContainerDied","Data":"7060b54fb486f01017a7a3e2cf2f09cc7c7d094f9725f993f2ddcbae94b14adb"} Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.536150 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-644597f84c-ldfb2" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.587543 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8pxl\" (UniqueName: \"kubernetes.io/projected/38070298-7793-4fda-adaf-471d64d2b023-kube-api-access-h8pxl\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.587577 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.587610 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38070298-7793-4fda-adaf-471d64d2b023-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.598907 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.648309 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.651818 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-644597f84c-ldfb2"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.676260 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.783873 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-vcl7d"] Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.828298 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38070298-7793-4fda-adaf-471d64d2b023" path="/var/lib/kubelet/pods/38070298-7793-4fda-adaf-471d64d2b023/volumes" Oct 09 09:19:20 crc kubenswrapper[4710]: I1009 09:19:20.931987 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:19:20 crc kubenswrapper[4710]: W1009 09:19:20.940584 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56bbc07d_0741_4064_b135_11222d19a27f.slice/crio-0e554b021473825c3434485fdb6749b919cbca460f34eb0033129cf8681b1be2 WatchSource:0}: Error finding container 0e554b021473825c3434485fdb6749b919cbca460f34eb0033129cf8681b1be2: Status 404 returned error can't find the container with id 0e554b021473825c3434485fdb6749b919cbca460f34eb0033129cf8681b1be2 Oct 09 09:19:21 crc kubenswrapper[4710]: I1009 09:19:21.076505 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Oct 09 09:19:21 crc kubenswrapper[4710]: I1009 09:19:21.552405 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vcl7d" event={"ID":"50c1f1ba-8a7d-482b-841f-591355f9dd44","Type":"ContainerStarted","Data":"827519123be7cc2a32af952ea37cd1f32bc1d1389443775140d699a0346a39ba"} Oct 09 09:19:21 crc kubenswrapper[4710]: I1009 09:19:21.553541 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b6b2be0-00ba-434b-9310-32ee9f286c71","Type":"ContainerStarted","Data":"cb1c5836b405a27c7f8c3fc891f19f9747855f72f5b0222e3f22e53638fa1fdc"} Oct 09 09:19:21 crc kubenswrapper[4710]: I1009 09:19:21.554250 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" event={"ID":"6e6f7af3-2e0b-4135-9f26-b6912ded0cce","Type":"ContainerStarted","Data":"27ef90185b7d3aad6005190dc996ea199ea4c42229a9d8a67443b2ee5e421d07"} Oct 09 09:19:21 crc kubenswrapper[4710]: I1009 09:19:21.556808 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" event={"ID":"56bbc07d-0741-4064-b135-11222d19a27f","Type":"ContainerStarted","Data":"0e554b021473825c3434485fdb6749b919cbca460f34eb0033129cf8681b1be2"} Oct 09 09:19:22 crc kubenswrapper[4710]: I1009 09:19:22.566335 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-vcl7d" event={"ID":"50c1f1ba-8a7d-482b-841f-591355f9dd44","Type":"ContainerStarted","Data":"188a63583a8d831fbc787822a6a376a092e48de76313fdafbd7f5e69effa196b"} Oct 09 09:19:22 crc kubenswrapper[4710]: I1009 09:19:22.586617 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-vcl7d" 
podStartSLOduration=3.586597153 podStartE2EDuration="3.586597153s" podCreationTimestamp="2025-10-09 09:19:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:19:22.58302139 +0000 UTC m=+886.073129788" watchObservedRunningTime="2025-10-09 09:19:22.586597153 +0000 UTC m=+886.076705550" Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.578984 4710 generic.go:334] "Generic (PLEG): container finished" podID="c4ddf393-082f-4ea4-9644-c6e03236fc02" containerID="6e34aa52d306c200d072c163552b7b470f0919cb8158aab79ea35c3a2b2a8449" exitCode=0 Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.579080 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-gzssh" event={"ID":"c4ddf393-082f-4ea4-9644-c6e03236fc02","Type":"ContainerDied","Data":"6e34aa52d306c200d072c163552b7b470f0919cb8158aab79ea35c3a2b2a8449"} Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.582742 4710 generic.go:334] "Generic (PLEG): container finished" podID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerID="f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0" exitCode=0 Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.582926 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" event={"ID":"6e6f7af3-2e0b-4135-9f26-b6912ded0cce","Type":"ContainerDied","Data":"f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0"} Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.588013 4710 generic.go:334] "Generic (PLEG): container finished" podID="56bbc07d-0741-4064-b135-11222d19a27f" containerID="4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63" exitCode=0 Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.588072 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" event={"ID":"56bbc07d-0741-4064-b135-11222d19a27f","Type":"ContainerDied","Data":"4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63"} Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.849884 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.956463 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg6ft\" (UniqueName: \"kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft\") pod \"c4ddf393-082f-4ea4-9644-c6e03236fc02\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.956606 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config\") pod \"c4ddf393-082f-4ea4-9644-c6e03236fc02\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.956652 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc\") pod \"c4ddf393-082f-4ea4-9644-c6e03236fc02\" (UID: \"c4ddf393-082f-4ea4-9644-c6e03236fc02\") " Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.962294 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft" (OuterVolumeSpecName: "kube-api-access-cg6ft") pod "c4ddf393-082f-4ea4-9644-c6e03236fc02" (UID: "c4ddf393-082f-4ea4-9644-c6e03236fc02"). InnerVolumeSpecName "kube-api-access-cg6ft". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.977187 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config" (OuterVolumeSpecName: "config") pod "c4ddf393-082f-4ea4-9644-c6e03236fc02" (UID: "c4ddf393-082f-4ea4-9644-c6e03236fc02"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:23 crc kubenswrapper[4710]: I1009 09:19:23.984404 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4ddf393-082f-4ea4-9644-c6e03236fc02" (UID: "c4ddf393-082f-4ea4-9644-c6e03236fc02"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.058522 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.058555 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4ddf393-082f-4ea4-9644-c6e03236fc02-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.058577 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg6ft\" (UniqueName: \"kubernetes.io/projected/c4ddf393-082f-4ea4-9644-c6e03236fc02-kube-api-access-cg6ft\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.599337 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" event={"ID":"6e6f7af3-2e0b-4135-9f26-b6912ded0cce","Type":"ContainerStarted","Data":"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40"} Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.599710 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.602245 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" event={"ID":"56bbc07d-0741-4064-b135-11222d19a27f","Type":"ContainerStarted","Data":"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185"} Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.602397 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.603642 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b6b2be0-00ba-434b-9310-32ee9f286c71","Type":"ContainerStarted","Data":"30af9a492f7a57c4c8f8e59b29d27f96c1de222ff42a5b9d71cb94213f8ed2bf"} Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.606309 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77597f887-gzssh" event={"ID":"c4ddf393-082f-4ea4-9644-c6e03236fc02","Type":"ContainerDied","Data":"f31f5c8de109834ad3615e36ded053a67b812a82b035ebd9c2413efec2e87034"} Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.606359 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77597f887-gzssh" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.606373 4710 scope.go:117] "RemoveContainer" containerID="6e34aa52d306c200d072c163552b7b470f0919cb8158aab79ea35c3a2b2a8449" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.622611 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" podStartSLOduration=3.538132578 podStartE2EDuration="5.622597674s" podCreationTimestamp="2025-10-09 09:19:19 +0000 UTC" firstStartedPulling="2025-10-09 09:19:20.701776557 +0000 UTC m=+884.191884955" lastFinishedPulling="2025-10-09 09:19:22.786241654 +0000 UTC m=+886.276350051" observedRunningTime="2025-10-09 09:19:24.614875935 +0000 UTC m=+888.104984332" watchObservedRunningTime="2025-10-09 09:19:24.622597674 +0000 UTC m=+888.112706072" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.636922 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" podStartSLOduration=2.722286761 podStartE2EDuration="4.636900845s" podCreationTimestamp="2025-10-09 09:19:20 +0000 UTC" firstStartedPulling="2025-10-09 09:19:20.942290112 +0000 UTC m=+884.432398509" lastFinishedPulling="2025-10-09 09:19:22.856904197 +0000 UTC m=+886.347012593" observedRunningTime="2025-10-09 09:19:24.630840878 +0000 UTC m=+888.120949276" watchObservedRunningTime="2025-10-09 09:19:24.636900845 +0000 UTC m=+888.127009242" Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.719174 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.736106 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77597f887-gzssh"] Oct 09 09:19:24 crc kubenswrapper[4710]: I1009 09:19:24.829037 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" path="/var/lib/kubelet/pods/c4ddf393-082f-4ea4-9644-c6e03236fc02/volumes" Oct 09 09:19:25 crc kubenswrapper[4710]: I1009 09:19:25.615240 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4b6b2be0-00ba-434b-9310-32ee9f286c71","Type":"ContainerStarted","Data":"7c45c08a23d1a97e189fa48491c7af36fbfcf14cbc64b71a6ad81f7ea5e5e7c6"} Oct 09 09:19:25 crc kubenswrapper[4710]: I1009 09:19:25.636968 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.333125653 podStartE2EDuration="5.636949465s" podCreationTimestamp="2025-10-09 09:19:20 +0000 UTC" firstStartedPulling="2025-10-09 09:19:21.080050033 +0000 UTC m=+884.570158420" lastFinishedPulling="2025-10-09 09:19:24.383873835 +0000 UTC m=+887.873982232" observedRunningTime="2025-10-09 09:19:25.634672142 +0000 UTC m=+889.124780559" watchObservedRunningTime="2025-10-09 09:19:25.636949465 +0000 UTC m=+889.127057862" Oct 09 09:19:26 crc kubenswrapper[4710]: I1009 09:19:26.622042 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.104720 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.104807 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.144874 4710 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.226172 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.226214 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.262830 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.682823 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 09 09:19:28 crc kubenswrapper[4710]: I1009 09:19:28.682911 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 09 09:19:30 crc kubenswrapper[4710]: I1009 09:19:30.177658 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:30 crc kubenswrapper[4710]: I1009 09:19:30.380088 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:19:30 crc kubenswrapper[4710]: I1009 09:19:30.430562 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:30 crc kubenswrapper[4710]: I1009 09:19:30.656004 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="dnsmasq-dns" containerID="cri-o://03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40" gracePeriod=10 Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.053787 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.100628 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc\") pod \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.100791 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb\") pod \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.100855 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnwck\" (UniqueName: \"kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck\") pod \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.100919 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config\") pod \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\" (UID: \"6e6f7af3-2e0b-4135-9f26-b6912ded0cce\") " Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.107721 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck" (OuterVolumeSpecName: "kube-api-access-qnwck") pod "6e6f7af3-2e0b-4135-9f26-b6912ded0cce" (UID: "6e6f7af3-2e0b-4135-9f26-b6912ded0cce"). InnerVolumeSpecName "kube-api-access-qnwck". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.140788 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6e6f7af3-2e0b-4135-9f26-b6912ded0cce" (UID: "6e6f7af3-2e0b-4135-9f26-b6912ded0cce"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.142185 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config" (OuterVolumeSpecName: "config") pod "6e6f7af3-2e0b-4135-9f26-b6912ded0cce" (UID: "6e6f7af3-2e0b-4135-9f26-b6912ded0cce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.151209 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6e6f7af3-2e0b-4135-9f26-b6912ded0cce" (UID: "6e6f7af3-2e0b-4135-9f26-b6912ded0cce"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.203937 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.203971 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnwck\" (UniqueName: \"kubernetes.io/projected/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-kube-api-access-qnwck\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.203986 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.203997 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6e6f7af3-2e0b-4135-9f26-b6912ded0cce-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.667272 4710 generic.go:334] "Generic (PLEG): container finished" podID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerID="03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40" exitCode=0 Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.667326 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" event={"ID":"6e6f7af3-2e0b-4135-9f26-b6912ded0cce","Type":"ContainerDied","Data":"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40"} Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.667367 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" event={"ID":"6e6f7af3-2e0b-4135-9f26-b6912ded0cce","Type":"ContainerDied","Data":"27ef90185b7d3aad6005190dc996ea199ea4c42229a9d8a67443b2ee5e421d07"} Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.667386 4710 scope.go:117] "RemoveContainer" containerID="03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.667701 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d8fd57975-2s7qz" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.689247 4710 scope.go:117] "RemoveContainer" containerID="f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.704795 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.709671 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d8fd57975-2s7qz"] Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.722384 4710 scope.go:117] "RemoveContainer" containerID="03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40" Oct 09 09:19:31 crc kubenswrapper[4710]: E1009 09:19:31.722852 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40\": container with ID starting with 03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40 not found: ID does not exist" containerID="03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.722938 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40"} err="failed to get container status \"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40\": rpc error: code = NotFound desc = could not find container \"03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40\": container with ID starting with 03b0e75d9a6942b34c9cadcc40d53d7e482ca23b4c4438c18598d40a2947ae40 not found: ID does not exist" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.723013 4710 scope.go:117] "RemoveContainer" containerID="f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0" Oct 09 09:19:31 crc kubenswrapper[4710]: E1009 09:19:31.723341 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0\": container with ID starting with f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0 not found: ID does not exist" containerID="f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0" Oct 09 09:19:31 crc kubenswrapper[4710]: I1009 09:19:31.723366 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0"} err="failed to get container status \"f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0\": rpc error: code = NotFound desc = could not find container \"f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0\": container with ID starting with f19bd2ff81978132431b1476138faaf7066ed097f6153248052824b66a2db9a0 not found: ID does not exist" Oct 09 09:19:32 crc kubenswrapper[4710]: I1009 09:19:32.825989 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" path="/var/lib/kubelet/pods/6e6f7af3-2e0b-4135-9f26-b6912ded0cce/volumes" Oct 09 09:19:35 crc kubenswrapper[4710]: I1009 09:19:35.545936 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:19:35 crc kubenswrapper[4710]: I1009 09:19:35.546303 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:19:35 crc kubenswrapper[4710]: I1009 09:19:35.650528 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.361086 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-lzhjx"] Oct 09 09:19:38 crc kubenswrapper[4710]: E1009 09:19:38.361893 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="dnsmasq-dns" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.361909 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="dnsmasq-dns" Oct 09 09:19:38 crc kubenswrapper[4710]: E1009 09:19:38.361925 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="init" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.361931 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="init" Oct 09 09:19:38 crc kubenswrapper[4710]: E1009 09:19:38.361939 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" containerName="init" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.361944 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" containerName="init" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.362137 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ddf393-082f-4ea4-9644-c6e03236fc02" containerName="init" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.362147 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e6f7af3-2e0b-4135-9f26-b6912ded0cce" containerName="dnsmasq-dns" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.362759 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.371963 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lzhjx"] Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.449710 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz22x\" (UniqueName: \"kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x\") pod \"keystone-db-create-lzhjx\" (UID: \"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a\") " pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.551517 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz22x\" (UniqueName: \"kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x\") pod \"keystone-db-create-lzhjx\" (UID: \"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a\") " pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.556588 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-8wst4"] Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.557561 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-8wst4" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.565235 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-8wst4"] Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.573077 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz22x\" (UniqueName: \"kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x\") pod \"keystone-db-create-lzhjx\" (UID: \"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a\") " pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.653558 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8gns\" (UniqueName: \"kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns\") pod \"placement-db-create-8wst4\" (UID: \"169a7e02-2da0-44b5-916f-2d10ad521e61\") " pod="openstack/placement-db-create-8wst4" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.676023 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.732344 4710 generic.go:334] "Generic (PLEG): container finished" podID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerID="af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906" exitCode=0 Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.732482 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerDied","Data":"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906"} Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.736862 4710 generic.go:334] "Generic (PLEG): container finished" podID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerID="2f54f5b6b2db40672622d4587eeace11474707658f13f2878be4600122b0ee55" exitCode=0 Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.736965 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerDied","Data":"2f54f5b6b2db40672622d4587eeace11474707658f13f2878be4600122b0ee55"} Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.757306 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8gns\" (UniqueName: \"kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns\") pod \"placement-db-create-8wst4\" (UID: \"169a7e02-2da0-44b5-916f-2d10ad521e61\") " pod="openstack/placement-db-create-8wst4" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.808979 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8gns\" (UniqueName: \"kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns\") pod \"placement-db-create-8wst4\" (UID: \"169a7e02-2da0-44b5-916f-2d10ad521e61\") " pod="openstack/placement-db-create-8wst4" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.812451 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-khqb9"] Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.823681 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-khqb9" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.834861 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-khqb9"] Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.905986 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-8wst4" Oct 09 09:19:38 crc kubenswrapper[4710]: I1009 09:19:38.962440 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgxvx\" (UniqueName: \"kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx\") pod \"glance-db-create-khqb9\" (UID: \"997f4298-9244-4ef3-8783-c4e68d569407\") " pod="openstack/glance-db-create-khqb9" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.064788 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgxvx\" (UniqueName: \"kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx\") pod \"glance-db-create-khqb9\" (UID: \"997f4298-9244-4ef3-8783-c4e68d569407\") " pod="openstack/glance-db-create-khqb9" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.087835 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgxvx\" (UniqueName: \"kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx\") pod \"glance-db-create-khqb9\" (UID: \"997f4298-9244-4ef3-8783-c4e68d569407\") " pod="openstack/glance-db-create-khqb9" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.115554 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lzhjx"] Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.151689 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-khqb9" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.296080 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-8wst4"] Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.415540 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-khqb9"] Oct 09 09:19:39 crc kubenswrapper[4710]: W1009 09:19:39.416140 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod997f4298_9244_4ef3_8783_c4e68d569407.slice/crio-952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e WatchSource:0}: Error finding container 952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e: Status 404 returned error can't find the container with id 952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.745475 4710 generic.go:334] "Generic (PLEG): container finished" podID="aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" containerID="e8b6efdad85b29380f15ed8793f18c76d3c1669d0996d208e4a0f71a83ebdee1" exitCode=0 Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.745558 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lzhjx" event={"ID":"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a","Type":"ContainerDied","Data":"e8b6efdad85b29380f15ed8793f18c76d3c1669d0996d208e4a0f71a83ebdee1"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.745591 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lzhjx" event={"ID":"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a","Type":"ContainerStarted","Data":"b40c2d3e29e7aea7846c8cff348de7e9b7cd4f9dffe6506d006d9b5d9c16abfc"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.747519 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerStarted","Data":"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.747753 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.749380 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerStarted","Data":"7e907b39a89c5b048812250c378e5e35adcd718098aa2955bc803d381e27613d"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.749845 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.750863 4710 generic.go:334] "Generic (PLEG): container finished" podID="997f4298-9244-4ef3-8783-c4e68d569407" containerID="fad02ba2af5668af8bfff3f587a1373229a3015a4a366b720f2a90fc0058146c" exitCode=0 Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.750913 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-khqb9" event={"ID":"997f4298-9244-4ef3-8783-c4e68d569407","Type":"ContainerDied","Data":"fad02ba2af5668af8bfff3f587a1373229a3015a4a366b720f2a90fc0058146c"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.750929 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-khqb9" event={"ID":"997f4298-9244-4ef3-8783-c4e68d569407","Type":"ContainerStarted","Data":"952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.752001 4710 generic.go:334] "Generic (PLEG): container finished" podID="169a7e02-2da0-44b5-916f-2d10ad521e61" containerID="726fc2063daddfb016c335b11fb00bbbe4f98f582ca4b36f4199234e97eaec84" exitCode=0 Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.752028 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8wst4" event={"ID":"169a7e02-2da0-44b5-916f-2d10ad521e61","Type":"ContainerDied","Data":"726fc2063daddfb016c335b11fb00bbbe4f98f582ca4b36f4199234e97eaec84"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.752042 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8wst4" event={"ID":"169a7e02-2da0-44b5-916f-2d10ad521e61","Type":"ContainerStarted","Data":"b01fa31778a2b65f16c440af29000446925e0675c457eebd7dd309d3d05bfac7"} Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.819089 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.360050332 podStartE2EDuration="56.819068394s" podCreationTimestamp="2025-10-09 09:18:43 +0000 UTC" firstStartedPulling="2025-10-09 09:18:54.013640476 +0000 UTC m=+857.503748874" lastFinishedPulling="2025-10-09 09:19:05.472658549 +0000 UTC m=+868.962766936" observedRunningTime="2025-10-09 09:19:39.813369909 +0000 UTC m=+903.303478305" watchObservedRunningTime="2025-10-09 09:19:39.819068394 +0000 UTC m=+903.309176791" Oct 09 09:19:39 crc kubenswrapper[4710]: I1009 09:19:39.855781 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=44.213729617 podStartE2EDuration="55.85574438s" podCreationTimestamp="2025-10-09 09:18:44 +0000 UTC" firstStartedPulling="2025-10-09 09:18:54.014686679 +0000 UTC m=+857.504795076" 
lastFinishedPulling="2025-10-09 09:19:05.656701442 +0000 UTC m=+869.146809839" observedRunningTime="2025-10-09 09:19:39.85325709 +0000 UTC m=+903.343365488" watchObservedRunningTime="2025-10-09 09:19:39.85574438 +0000 UTC m=+903.345852777" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.145064 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.155421 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-khqb9" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.159101 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-8wst4" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.217407 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgxvx\" (UniqueName: \"kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx\") pod \"997f4298-9244-4ef3-8783-c4e68d569407\" (UID: \"997f4298-9244-4ef3-8783-c4e68d569407\") " Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.217676 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8gns\" (UniqueName: \"kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns\") pod \"169a7e02-2da0-44b5-916f-2d10ad521e61\" (UID: \"169a7e02-2da0-44b5-916f-2d10ad521e61\") " Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.217955 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz22x\" (UniqueName: \"kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x\") pod \"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a\" (UID: \"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a\") " Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.225355 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x" (OuterVolumeSpecName: "kube-api-access-zz22x") pod "aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" (UID: "aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a"). InnerVolumeSpecName "kube-api-access-zz22x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.234292 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns" (OuterVolumeSpecName: "kube-api-access-x8gns") pod "169a7e02-2da0-44b5-916f-2d10ad521e61" (UID: "169a7e02-2da0-44b5-916f-2d10ad521e61"). InnerVolumeSpecName "kube-api-access-x8gns". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.241927 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx" (OuterVolumeSpecName: "kube-api-access-bgxvx") pod "997f4298-9244-4ef3-8783-c4e68d569407" (UID: "997f4298-9244-4ef3-8783-c4e68d569407"). InnerVolumeSpecName "kube-api-access-bgxvx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.321206 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgxvx\" (UniqueName: \"kubernetes.io/projected/997f4298-9244-4ef3-8783-c4e68d569407-kube-api-access-bgxvx\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.321246 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8gns\" (UniqueName: \"kubernetes.io/projected/169a7e02-2da0-44b5-916f-2d10ad521e61-kube-api-access-x8gns\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.321262 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz22x\" (UniqueName: \"kubernetes.io/projected/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a-kube-api-access-zz22x\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.769854 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lzhjx" event={"ID":"aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a","Type":"ContainerDied","Data":"b40c2d3e29e7aea7846c8cff348de7e9b7cd4f9dffe6506d006d9b5d9c16abfc"} Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.769922 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b40c2d3e29e7aea7846c8cff348de7e9b7cd4f9dffe6506d006d9b5d9c16abfc" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.770116 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lzhjx" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.772105 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-khqb9" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.772538 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-khqb9" event={"ID":"997f4298-9244-4ef3-8783-c4e68d569407","Type":"ContainerDied","Data":"952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e"} Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.772592 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="952303ba060798633fb527bd84228d01377aa28a42cdaaf04ad43d510f43dd2e" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.775039 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-8wst4" event={"ID":"169a7e02-2da0-44b5-916f-2d10ad521e61","Type":"ContainerDied","Data":"b01fa31778a2b65f16c440af29000446925e0675c457eebd7dd309d3d05bfac7"} Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.775084 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-8wst4" Oct 09 09:19:41 crc kubenswrapper[4710]: I1009 09:19:41.775101 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b01fa31778a2b65f16c440af29000446925e0675c457eebd7dd309d3d05bfac7" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.663120 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-ff4n7" podUID="8c4f1833-b6b6-4c51-bd5e-0b4cf749e848" containerName="ovn-controller" probeResult="failure" output=< Oct 09 09:19:44 crc kubenswrapper[4710]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 09 09:19:44 crc kubenswrapper[4710]: > Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.672036 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.674053 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-b2pbr" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.876915 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ff4n7-config-cv62h"] Oct 09 09:19:44 crc kubenswrapper[4710]: E1009 09:19:44.877605 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.877652 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: E1009 09:19:44.877675 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="997f4298-9244-4ef3-8783-c4e68d569407" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.877683 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="997f4298-9244-4ef3-8783-c4e68d569407" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: E1009 09:19:44.877701 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169a7e02-2da0-44b5-916f-2d10ad521e61" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.877710 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="169a7e02-2da0-44b5-916f-2d10ad521e61" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.878010 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="169a7e02-2da0-44b5-916f-2d10ad521e61" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.878033 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.878059 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="997f4298-9244-4ef3-8783-c4e68d569407" containerName="mariadb-database-create" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.879028 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.882800 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.893310 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7-config-cv62h"] Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980480 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980543 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980603 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v5zj\" (UniqueName: \"kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980633 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:44 crc kubenswrapper[4710]: I1009 09:19:44.980739 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082558 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082717 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082786 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082835 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v5zj\" (UniqueName: \"kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082865 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.082910 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.083642 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.083728 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.083729 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.083906 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.084733 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.104859 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v5zj\" (UniqueName: \"kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj\") pod \"ovn-controller-ff4n7-config-cv62h\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.198851 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.629299 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7-config-cv62h"] Oct 09 09:19:45 crc kubenswrapper[4710]: I1009 09:19:45.810455 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-cv62h" event={"ID":"7685d25d-2f14-4213-b78b-f9e2c22c6df0","Type":"ContainerStarted","Data":"3e4135656ab6291d1bbab16d8ee71a23d9d31d2e02a4bc84996971e60afea18c"} Oct 09 09:19:46 crc kubenswrapper[4710]: I1009 09:19:46.826258 4710 generic.go:334] "Generic (PLEG): container finished" podID="7685d25d-2f14-4213-b78b-f9e2c22c6df0" containerID="995b1658d5302a0ac1efc1491ddf0fa156bbe9c56cef16ae0e85c8ee3f8e81c5" exitCode=0 Oct 09 09:19:46 crc kubenswrapper[4710]: I1009 09:19:46.826638 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-cv62h" event={"ID":"7685d25d-2f14-4213-b78b-f9e2c22c6df0","Type":"ContainerDied","Data":"995b1658d5302a0ac1efc1491ddf0fa156bbe9c56cef16ae0e85c8ee3f8e81c5"} Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.071089 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.140986 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v5zj\" (UniqueName: \"kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.141047 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.141075 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.141174 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.141283 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.141364 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts\") pod \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\" (UID: \"7685d25d-2f14-4213-b78b-f9e2c22c6df0\") " Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.142028 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.142466 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.142517 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run" (OuterVolumeSpecName: "var-run") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.142538 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.143124 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts" (OuterVolumeSpecName: "scripts") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.146893 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj" (OuterVolumeSpecName: "kube-api-access-7v5zj") pod "7685d25d-2f14-4213-b78b-f9e2c22c6df0" (UID: "7685d25d-2f14-4213-b78b-f9e2c22c6df0"). InnerVolumeSpecName "kube-api-access-7v5zj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244053 4710 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244104 4710 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244118 4710 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244142 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v5zj\" (UniqueName: \"kubernetes.io/projected/7685d25d-2f14-4213-b78b-f9e2c22c6df0-kube-api-access-7v5zj\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244152 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7685d25d-2f14-4213-b78b-f9e2c22c6df0-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.244163 4710 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7685d25d-2f14-4213-b78b-f9e2c22c6df0-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.394259 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bf9b-account-create-q69nf"] Oct 09 09:19:48 crc kubenswrapper[4710]: E1009 09:19:48.395024 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7685d25d-2f14-4213-b78b-f9e2c22c6df0" containerName="ovn-config" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.395116 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="7685d25d-2f14-4213-b78b-f9e2c22c6df0" containerName="ovn-config" Oct 09 09:19:48 crc 
kubenswrapper[4710]: I1009 09:19:48.395380 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="7685d25d-2f14-4213-b78b-f9e2c22c6df0" containerName="ovn-config" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.395985 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.398029 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.409332 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bf9b-account-create-q69nf"] Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.449025 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctxph\" (UniqueName: \"kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph\") pod \"keystone-bf9b-account-create-q69nf\" (UID: \"e43c5ce1-db3e-4341-b902-ebc890e268b0\") " pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.550409 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctxph\" (UniqueName: \"kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph\") pod \"keystone-bf9b-account-create-q69nf\" (UID: \"e43c5ce1-db3e-4341-b902-ebc890e268b0\") " pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.568618 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctxph\" (UniqueName: \"kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph\") pod \"keystone-bf9b-account-create-q69nf\" (UID: \"e43c5ce1-db3e-4341-b902-ebc890e268b0\") " pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.697252 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b50b-account-create-dg79d"] Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.698327 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.700156 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.707761 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b50b-account-create-dg79d"] Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.709924 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.757851 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svrwf\" (UniqueName: \"kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf\") pod \"placement-b50b-account-create-dg79d\" (UID: \"6cfbb538-c79f-4a92-8f7e-0ddffac2b192\") " pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.852276 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-cv62h" event={"ID":"7685d25d-2f14-4213-b78b-f9e2c22c6df0","Type":"ContainerDied","Data":"3e4135656ab6291d1bbab16d8ee71a23d9d31d2e02a4bc84996971e60afea18c"} Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.852322 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e4135656ab6291d1bbab16d8ee71a23d9d31d2e02a4bc84996971e60afea18c" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.852389 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-cv62h" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.860542 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svrwf\" (UniqueName: \"kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf\") pod \"placement-b50b-account-create-dg79d\" (UID: \"6cfbb538-c79f-4a92-8f7e-0ddffac2b192\") " pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.880657 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svrwf\" (UniqueName: \"kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf\") pod \"placement-b50b-account-create-dg79d\" (UID: \"6cfbb538-c79f-4a92-8f7e-0ddffac2b192\") " pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.909901 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-bb96-account-create-hw97d"] Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.915182 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.918817 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.935198 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bb96-account-create-hw97d"] Oct 09 09:19:48 crc kubenswrapper[4710]: I1009 09:19:48.961925 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb9vf\" (UniqueName: \"kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf\") pod \"glance-bb96-account-create-hw97d\" (UID: \"4a27c895-0dd6-4d32-9572-e7a52ff1abbf\") " pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.014999 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.063787 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb9vf\" (UniqueName: \"kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf\") pod \"glance-bb96-account-create-hw97d\" (UID: \"4a27c895-0dd6-4d32-9572-e7a52ff1abbf\") " pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.103155 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb9vf\" (UniqueName: \"kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf\") pod \"glance-bb96-account-create-hw97d\" (UID: \"4a27c895-0dd6-4d32-9572-e7a52ff1abbf\") " pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.149371 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bf9b-account-create-q69nf"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.229251 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ff4n7-config-cv62h"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.235994 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.257641 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ff4n7-config-cv62h"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.314214 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ff4n7-config-5ks6v"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.316501 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.321084 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.332580 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7-config-5ks6v"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.356517 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b50b-account-create-dg79d"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368295 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf7rv\" (UniqueName: \"kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368335 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368359 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368379 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368423 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.368463 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470391 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470468 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470531 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470549 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470593 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf7rv\" (UniqueName: \"kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470627 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470948 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.470982 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.472381 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.472740 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.491296 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf7rv\" (UniqueName: \"kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv\") pod \"ovn-controller-ff4n7-config-5ks6v\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.653502 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.778692 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bb96-account-create-hw97d"] Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.792057 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ff4n7" Oct 09 09:19:49 crc kubenswrapper[4710]: W1009 09:19:49.792067 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a27c895_0dd6_4d32_9572_e7a52ff1abbf.slice/crio-173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1 WatchSource:0}: Error finding container 173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1: Status 404 returned error can't find the container with id 173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1 Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.890867 4710 generic.go:334] "Generic (PLEG): container finished" podID="e43c5ce1-db3e-4341-b902-ebc890e268b0" containerID="91968fb37e95aa33bba674031de80c9c31cfeaa91100bfca653ade876fb98df7" exitCode=0 Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.892267 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf9b-account-create-q69nf" event={"ID":"e43c5ce1-db3e-4341-b902-ebc890e268b0","Type":"ContainerDied","Data":"91968fb37e95aa33bba674031de80c9c31cfeaa91100bfca653ade876fb98df7"} Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.892292 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf9b-account-create-q69nf" event={"ID":"e43c5ce1-db3e-4341-b902-ebc890e268b0","Type":"ContainerStarted","Data":"4b8b169ca2b7b179b711a79153b19d0fa7efbff3af57bbcace67a17452d0c9f5"} Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.894700 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bb96-account-create-hw97d" event={"ID":"4a27c895-0dd6-4d32-9572-e7a52ff1abbf","Type":"ContainerStarted","Data":"173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1"} Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.899935 4710 generic.go:334] "Generic (PLEG): container finished" podID="6cfbb538-c79f-4a92-8f7e-0ddffac2b192" containerID="6d85b27f2c3296839a5f9deea94f10a1e4a5d676ff3f3d625c43693efad22bee" exitCode=0 Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.899966 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b50b-account-create-dg79d" 
event={"ID":"6cfbb538-c79f-4a92-8f7e-0ddffac2b192","Type":"ContainerDied","Data":"6d85b27f2c3296839a5f9deea94f10a1e4a5d676ff3f3d625c43693efad22bee"} Oct 09 09:19:49 crc kubenswrapper[4710]: I1009 09:19:49.899981 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b50b-account-create-dg79d" event={"ID":"6cfbb538-c79f-4a92-8f7e-0ddffac2b192","Type":"ContainerStarted","Data":"5e4e0ba6fff5c4c39830cdc104bdccdb8c822f50216d5b2fa6be5dd64cbd10e0"} Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.217224 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ff4n7-config-5ks6v"] Oct 09 09:19:50 crc kubenswrapper[4710]: W1009 09:19:50.223092 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d2efa29_9dfb_4e7a_9bb1_399770d0e24f.slice/crio-7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d WatchSource:0}: Error finding container 7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d: Status 404 returned error can't find the container with id 7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.823897 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7685d25d-2f14-4213-b78b-f9e2c22c6df0" path="/var/lib/kubelet/pods/7685d25d-2f14-4213-b78b-f9e2c22c6df0/volumes" Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.908553 4710 generic.go:334] "Generic (PLEG): container finished" podID="4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" containerID="70c76449ffa14657d27d832fb38a8026d5e3db5054835540b70e8b5d694ad6e7" exitCode=0 Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.908637 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-5ks6v" event={"ID":"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f","Type":"ContainerDied","Data":"70c76449ffa14657d27d832fb38a8026d5e3db5054835540b70e8b5d694ad6e7"} Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.908674 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-5ks6v" event={"ID":"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f","Type":"ContainerStarted","Data":"7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d"} Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.910376 4710 generic.go:334] "Generic (PLEG): container finished" podID="4a27c895-0dd6-4d32-9572-e7a52ff1abbf" containerID="e2bd2726ed439093dd05133b380be993a6fed28de75128de3300dfa2a2e4655a" exitCode=0 Oct 09 09:19:50 crc kubenswrapper[4710]: I1009 09:19:50.910514 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bb96-account-create-hw97d" event={"ID":"4a27c895-0dd6-4d32-9572-e7a52ff1abbf","Type":"ContainerDied","Data":"e2bd2726ed439093dd05133b380be993a6fed28de75128de3300dfa2a2e4655a"} Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.259272 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.267081 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.305034 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svrwf\" (UniqueName: \"kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf\") pod \"6cfbb538-c79f-4a92-8f7e-0ddffac2b192\" (UID: \"6cfbb538-c79f-4a92-8f7e-0ddffac2b192\") " Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.305117 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctxph\" (UniqueName: \"kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph\") pod \"e43c5ce1-db3e-4341-b902-ebc890e268b0\" (UID: \"e43c5ce1-db3e-4341-b902-ebc890e268b0\") " Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.312850 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph" (OuterVolumeSpecName: "kube-api-access-ctxph") pod "e43c5ce1-db3e-4341-b902-ebc890e268b0" (UID: "e43c5ce1-db3e-4341-b902-ebc890e268b0"). InnerVolumeSpecName "kube-api-access-ctxph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.314235 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf" (OuterVolumeSpecName: "kube-api-access-svrwf") pod "6cfbb538-c79f-4a92-8f7e-0ddffac2b192" (UID: "6cfbb538-c79f-4a92-8f7e-0ddffac2b192"). InnerVolumeSpecName "kube-api-access-svrwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.407913 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svrwf\" (UniqueName: \"kubernetes.io/projected/6cfbb538-c79f-4a92-8f7e-0ddffac2b192-kube-api-access-svrwf\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.408061 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctxph\" (UniqueName: \"kubernetes.io/projected/e43c5ce1-db3e-4341-b902-ebc890e268b0-kube-api-access-ctxph\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.924202 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b50b-account-create-dg79d" event={"ID":"6cfbb538-c79f-4a92-8f7e-0ddffac2b192","Type":"ContainerDied","Data":"5e4e0ba6fff5c4c39830cdc104bdccdb8c822f50216d5b2fa6be5dd64cbd10e0"} Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.924699 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e4e0ba6fff5c4c39830cdc104bdccdb8c822f50216d5b2fa6be5dd64cbd10e0" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.924578 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b50b-account-create-dg79d" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.926511 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bf9b-account-create-q69nf" Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.928873 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bf9b-account-create-q69nf" event={"ID":"e43c5ce1-db3e-4341-b902-ebc890e268b0","Type":"ContainerDied","Data":"4b8b169ca2b7b179b711a79153b19d0fa7efbff3af57bbcace67a17452d0c9f5"} Oct 09 09:19:51 crc kubenswrapper[4710]: I1009 09:19:51.928927 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b8b169ca2b7b179b711a79153b19d0fa7efbff3af57bbcace67a17452d0c9f5" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.153305 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.219817 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222292 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222360 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222461 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf7rv\" (UniqueName: \"kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222529 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222548 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222575 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222668 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn\") pod \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\" (UID: \"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.222797 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run" (OuterVolumeSpecName: "var-run") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.223516 4710 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.223540 4710 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.223719 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.223820 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.224078 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts" (OuterVolumeSpecName: "scripts") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.228624 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv" (OuterVolumeSpecName: "kube-api-access-nf7rv") pod "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" (UID: "4d2efa29-9dfb-4e7a-9bb1-399770d0e24f"). InnerVolumeSpecName "kube-api-access-nf7rv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.325310 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb9vf\" (UniqueName: \"kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf\") pod \"4a27c895-0dd6-4d32-9572-e7a52ff1abbf\" (UID: \"4a27c895-0dd6-4d32-9572-e7a52ff1abbf\") " Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.326279 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.326303 4710 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.326316 4710 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.326326 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf7rv\" (UniqueName: \"kubernetes.io/projected/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f-kube-api-access-nf7rv\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.328222 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf" (OuterVolumeSpecName: "kube-api-access-hb9vf") pod "4a27c895-0dd6-4d32-9572-e7a52ff1abbf" (UID: "4a27c895-0dd6-4d32-9572-e7a52ff1abbf"). InnerVolumeSpecName "kube-api-access-hb9vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.428102 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb9vf\" (UniqueName: \"kubernetes.io/projected/4a27c895-0dd6-4d32-9572-e7a52ff1abbf-kube-api-access-hb9vf\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.936778 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ff4n7-config-5ks6v" event={"ID":"4d2efa29-9dfb-4e7a-9bb1-399770d0e24f","Type":"ContainerDied","Data":"7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d"} Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.936812 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ff4n7-config-5ks6v" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.936829 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f7a42aebe9bdd5a314600c8353fcb961a3757f756520128b5f48060dd7f1e5d" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.938729 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bb96-account-create-hw97d" event={"ID":"4a27c895-0dd6-4d32-9572-e7a52ff1abbf","Type":"ContainerDied","Data":"173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1"} Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.938756 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="173175aec8db44f7a4ac20996fe5c89b9c2d49a0d75d2c245849fe87d87e23a1" Oct 09 09:19:52 crc kubenswrapper[4710]: I1009 09:19:52.938801 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bb96-account-create-hw97d" Oct 09 09:19:53 crc kubenswrapper[4710]: I1009 09:19:53.211633 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ff4n7-config-5ks6v"] Oct 09 09:19:53 crc kubenswrapper[4710]: I1009 09:19:53.216034 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ff4n7-config-5ks6v"] Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.019710 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-5pkvd"] Oct 09 09:19:54 crc kubenswrapper[4710]: E1009 09:19:54.020393 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" containerName="ovn-config" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020414 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" containerName="ovn-config" Oct 09 09:19:54 crc kubenswrapper[4710]: E1009 09:19:54.020461 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a27c895-0dd6-4d32-9572-e7a52ff1abbf" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020473 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a27c895-0dd6-4d32-9572-e7a52ff1abbf" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: E1009 09:19:54.020501 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43c5ce1-db3e-4341-b902-ebc890e268b0" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020510 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43c5ce1-db3e-4341-b902-ebc890e268b0" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: E1009 09:19:54.020522 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cfbb538-c79f-4a92-8f7e-0ddffac2b192" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020545 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cfbb538-c79f-4a92-8f7e-0ddffac2b192" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020810 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43c5ce1-db3e-4341-b902-ebc890e268b0" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020838 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" containerName="ovn-config" Oct 09 
09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020861 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cfbb538-c79f-4a92-8f7e-0ddffac2b192" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.020873 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a27c895-0dd6-4d32-9572-e7a52ff1abbf" containerName="mariadb-account-create" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.022051 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.025632 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.029032 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-gcdnv" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.030787 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5pkvd"] Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.053674 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.053814 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.053851 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.053952 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm9lt\" (UniqueName: \"kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.156186 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm9lt\" (UniqueName: \"kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.156275 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.156325 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.156347 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.162149 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.162168 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.162642 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.173588 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm9lt\" (UniqueName: \"kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt\") pod \"glance-db-sync-5pkvd\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.337901 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5pkvd" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.845257 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d2efa29-9dfb-4e7a-9bb1-399770d0e24f" path="/var/lib/kubelet/pods/4d2efa29-9dfb-4e7a-9bb1-399770d0e24f/volumes" Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.848306 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-5pkvd"] Oct 09 09:19:54 crc kubenswrapper[4710]: I1009 09:19:54.955399 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5pkvd" event={"ID":"b43105bc-a07d-402a-bef9-d4946f2827c3","Type":"ContainerStarted","Data":"001e26d2769494423af007d00b24aa9d5d8690960474d862a1d7aecac4c05a61"} Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.298615 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.677634 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.708388 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6xvgl"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.709332 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.736166 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6xvgl"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.810793 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-nd6hc"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.811729 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.880960 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nd6hc"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.888790 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzwqg\" (UniqueName: \"kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg\") pod \"cinder-db-create-6xvgl\" (UID: \"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11\") " pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.911108 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-9b87z"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.912167 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.940638 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9b87z"] Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.990057 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5q2d\" (UniqueName: \"kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d\") pod \"barbican-db-create-nd6hc\" (UID: \"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b\") " pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.990121 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djxb2\" (UniqueName: \"kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2\") pod \"neutron-db-create-9b87z\" (UID: \"0bfaedef-c78e-4e3f-88a4-820aa84cb116\") " pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:55 crc kubenswrapper[4710]: I1009 09:19:55.990292 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzwqg\" (UniqueName: \"kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg\") pod \"cinder-db-create-6xvgl\" (UID: \"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11\") " pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.024480 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzwqg\" (UniqueName: \"kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg\") pod \"cinder-db-create-6xvgl\" (UID: \"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11\") " pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.027340 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.092235 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5q2d\" (UniqueName: \"kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d\") pod \"barbican-db-create-nd6hc\" (UID: \"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b\") " pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.092287 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djxb2\" (UniqueName: \"kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2\") pod \"neutron-db-create-9b87z\" (UID: \"0bfaedef-c78e-4e3f-88a4-820aa84cb116\") " pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.115810 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5q2d\" (UniqueName: \"kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d\") pod \"barbican-db-create-nd6hc\" (UID: \"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b\") " pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.116660 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djxb2\" (UniqueName: \"kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2\") pod \"neutron-db-create-9b87z\" (UID: \"0bfaedef-c78e-4e3f-88a4-820aa84cb116\") " pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.124316 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.222650 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-m22wk"] Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.225228 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.232416 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-m22wk"] Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.234315 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.239573 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jdrf2" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.239581 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.239718 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.240699 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.406245 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.406295 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r262x\" (UniqueName: \"kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.406314 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.507757 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.507830 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r262x\" (UniqueName: \"kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.507851 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.513396 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.513622 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.528867 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r262x\" (UniqueName: \"kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x\") pod \"keystone-db-sync-m22wk\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.564060 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6xvgl"] Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.574346 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-m22wk" Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.689163 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-nd6hc"] Oct 09 09:19:56 crc kubenswrapper[4710]: W1009 09:19:56.698681 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fe9051e_5ccf_4754_9dd0_264bc9b32e9b.slice/crio-3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178 WatchSource:0}: Error finding container 3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178: Status 404 returned error can't find the container with id 3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178 Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.757348 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-9b87z"] Oct 09 09:19:56 crc kubenswrapper[4710]: W1009 09:19:56.764097 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0bfaedef_c78e_4e3f_88a4_820aa84cb116.slice/crio-c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b WatchSource:0}: Error finding container c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b: Status 404 returned error can't find the container with id c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.872034 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-m22wk"] Oct 09 09:19:56 crc kubenswrapper[4710]: W1009 09:19:56.888785 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod233cf622_10b1_47bb_acf6_12f79fb6ea66.slice/crio-900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6 WatchSource:0}: Error finding container 900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6: Status 404 returned error can't find the container with id 900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6 Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.971872 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9b87z" event={"ID":"0bfaedef-c78e-4e3f-88a4-820aa84cb116","Type":"ContainerStarted","Data":"fc95db0b2f4f099d80594eceeebddd26924653923570ee0c7d201a7e911e9254"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.971920 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9b87z" 
event={"ID":"0bfaedef-c78e-4e3f-88a4-820aa84cb116","Type":"ContainerStarted","Data":"c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.975695 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nd6hc" event={"ID":"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b","Type":"ContainerStarted","Data":"8814a5fb02920a37c3ebac7d81701836fcbff8deefe7f15f4b8a8ea4aaa3cb50"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.975913 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nd6hc" event={"ID":"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b","Type":"ContainerStarted","Data":"3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.976515 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-m22wk" event={"ID":"233cf622-10b1-47bb-acf6-12f79fb6ea66","Type":"ContainerStarted","Data":"900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.977698 4710 generic.go:334] "Generic (PLEG): container finished" podID="d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" containerID="d8b9ef2aa408bed2ea317b5c2f23ba2906aa6dab8724ec7470a2fc52adffcdbd" exitCode=0 Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.977730 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6xvgl" event={"ID":"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11","Type":"ContainerDied","Data":"d8b9ef2aa408bed2ea317b5c2f23ba2906aa6dab8724ec7470a2fc52adffcdbd"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.977747 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6xvgl" event={"ID":"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11","Type":"ContainerStarted","Data":"83e0c428486ee01caa9db7f331d42a8b09bf46cc4b4fa71cb9e465be00ab65e2"} Oct 09 09:19:56 crc kubenswrapper[4710]: I1009 09:19:56.992199 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-9b87z" podStartSLOduration=1.992188109 podStartE2EDuration="1.992188109s" podCreationTimestamp="2025-10-09 09:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:19:56.984279797 +0000 UTC m=+920.474388194" watchObservedRunningTime="2025-10-09 09:19:56.992188109 +0000 UTC m=+920.482296506" Oct 09 09:19:57 crc kubenswrapper[4710]: I1009 09:19:57.019102 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-nd6hc" podStartSLOduration=2.019080016 podStartE2EDuration="2.019080016s" podCreationTimestamp="2025-10-09 09:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:19:57.010408826 +0000 UTC m=+920.500517223" watchObservedRunningTime="2025-10-09 09:19:57.019080016 +0000 UTC m=+920.509188413" Oct 09 09:19:57 crc kubenswrapper[4710]: I1009 09:19:57.987569 4710 generic.go:334] "Generic (PLEG): container finished" podID="0bfaedef-c78e-4e3f-88a4-820aa84cb116" containerID="fc95db0b2f4f099d80594eceeebddd26924653923570ee0c7d201a7e911e9254" exitCode=0 Oct 09 09:19:57 crc kubenswrapper[4710]: I1009 09:19:57.987632 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9b87z" 
event={"ID":"0bfaedef-c78e-4e3f-88a4-820aa84cb116","Type":"ContainerDied","Data":"fc95db0b2f4f099d80594eceeebddd26924653923570ee0c7d201a7e911e9254"} Oct 09 09:19:57 crc kubenswrapper[4710]: I1009 09:19:57.989986 4710 generic.go:334] "Generic (PLEG): container finished" podID="3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" containerID="8814a5fb02920a37c3ebac7d81701836fcbff8deefe7f15f4b8a8ea4aaa3cb50" exitCode=0 Oct 09 09:19:57 crc kubenswrapper[4710]: I1009 09:19:57.990223 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nd6hc" event={"ID":"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b","Type":"ContainerDied","Data":"8814a5fb02920a37c3ebac7d81701836fcbff8deefe7f15f4b8a8ea4aaa3cb50"} Oct 09 09:19:58 crc kubenswrapper[4710]: I1009 09:19:58.307868 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:58 crc kubenswrapper[4710]: I1009 09:19:58.453610 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzwqg\" (UniqueName: \"kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg\") pod \"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11\" (UID: \"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11\") " Oct 09 09:19:58 crc kubenswrapper[4710]: I1009 09:19:58.475533 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg" (OuterVolumeSpecName: "kube-api-access-xzwqg") pod "d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" (UID: "d6c6070b-6e0d-45a9-bbd8-5d28cd740a11"). InnerVolumeSpecName "kube-api-access-xzwqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:58 crc kubenswrapper[4710]: I1009 09:19:58.557219 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzwqg\" (UniqueName: \"kubernetes.io/projected/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11-kube-api-access-xzwqg\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.003387 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6xvgl" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.004341 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6xvgl" event={"ID":"d6c6070b-6e0d-45a9-bbd8-5d28cd740a11","Type":"ContainerDied","Data":"83e0c428486ee01caa9db7f331d42a8b09bf46cc4b4fa71cb9e465be00ab65e2"} Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.004375 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83e0c428486ee01caa9db7f331d42a8b09bf46cc4b4fa71cb9e465be00ab65e2" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.277106 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9b87z" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.380537 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djxb2\" (UniqueName: \"kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2\") pod \"0bfaedef-c78e-4e3f-88a4-820aa84cb116\" (UID: \"0bfaedef-c78e-4e3f-88a4-820aa84cb116\") " Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.392750 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2" (OuterVolumeSpecName: "kube-api-access-djxb2") pod "0bfaedef-c78e-4e3f-88a4-820aa84cb116" (UID: "0bfaedef-c78e-4e3f-88a4-820aa84cb116"). InnerVolumeSpecName "kube-api-access-djxb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.393914 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nd6hc" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.483134 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5q2d\" (UniqueName: \"kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d\") pod \"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b\" (UID: \"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b\") " Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.484030 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djxb2\" (UniqueName: \"kubernetes.io/projected/0bfaedef-c78e-4e3f-88a4-820aa84cb116-kube-api-access-djxb2\") on node \"crc\" DevicePath \"\"" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.488922 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d" (OuterVolumeSpecName: "kube-api-access-j5q2d") pod "3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" (UID: "3fe9051e-5ccf-4754-9dd0-264bc9b32e9b"). InnerVolumeSpecName "kube-api-access-j5q2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:19:59 crc kubenswrapper[4710]: I1009 09:19:59.593050 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5q2d\" (UniqueName: \"kubernetes.io/projected/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b-kube-api-access-j5q2d\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.013169 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-9b87z" event={"ID":"0bfaedef-c78e-4e3f-88a4-820aa84cb116","Type":"ContainerDied","Data":"c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b"} Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.013635 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c98f46616bc39811583857b3c51c964bb65ba02a03876e92d1accf0ba9bb053b" Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.013667 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-9b87z" Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.015882 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-nd6hc" event={"ID":"3fe9051e-5ccf-4754-9dd0-264bc9b32e9b","Type":"ContainerDied","Data":"3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178"} Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.015944 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e558eda0105ba5789d15c3c8667ba2da50d3bbae5de15b1e0bfb282d4060178" Oct 09 09:20:00 crc kubenswrapper[4710]: I1009 09:20:00.015957 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-nd6hc" Oct 09 09:20:04 crc kubenswrapper[4710]: I1009 09:20:04.054934 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-m22wk" event={"ID":"233cf622-10b1-47bb-acf6-12f79fb6ea66","Type":"ContainerStarted","Data":"a1bfcf519c7213ae0e4ef45a0e7719a874f24cb3ebace84c75b914f82e5c8c6d"} Oct 09 09:20:04 crc kubenswrapper[4710]: I1009 09:20:04.079459 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-m22wk" podStartSLOduration=1.874793898 podStartE2EDuration="8.079425631s" podCreationTimestamp="2025-10-09 09:19:56 +0000 UTC" firstStartedPulling="2025-10-09 09:19:56.899016423 +0000 UTC m=+920.389124820" lastFinishedPulling="2025-10-09 09:20:03.103648156 +0000 UTC m=+926.593756553" observedRunningTime="2025-10-09 09:20:04.075660731 +0000 UTC m=+927.565769129" watchObservedRunningTime="2025-10-09 09:20:04.079425631 +0000 UTC m=+927.569534028" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.065648 4710 generic.go:334] "Generic (PLEG): container finished" podID="233cf622-10b1-47bb-acf6-12f79fb6ea66" containerID="a1bfcf519c7213ae0e4ef45a0e7719a874f24cb3ebace84c75b914f82e5c8c6d" exitCode=0 Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.065703 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-m22wk" event={"ID":"233cf622-10b1-47bb-acf6-12f79fb6ea66","Type":"ContainerDied","Data":"a1bfcf519c7213ae0e4ef45a0e7719a874f24cb3ebace84c75b914f82e5c8c6d"} Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.546485 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.546923 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.546991 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.548315 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b"} 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.548398 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b" gracePeriod=600 Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.715976 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0133-account-create-jdvz5"] Oct 09 09:20:05 crc kubenswrapper[4710]: E1009 09:20:05.716293 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.716307 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: E1009 09:20:05.716326 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bfaedef-c78e-4e3f-88a4-820aa84cb116" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.716332 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bfaedef-c78e-4e3f-88a4-820aa84cb116" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: E1009 09:20:05.716347 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.716353 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.717715 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.717737 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bfaedef-c78e-4e3f-88a4-820aa84cb116" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.717754 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" containerName="mariadb-database-create" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.718273 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.720595 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.726279 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0133-account-create-jdvz5"] Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.731911 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sft8\" (UniqueName: \"kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8\") pod \"barbican-0133-account-create-jdvz5\" (UID: \"2d3e47ce-b689-4ecd-80f6-23cce6d416f6\") " pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.833534 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sft8\" (UniqueName: \"kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8\") pod \"barbican-0133-account-create-jdvz5\" (UID: \"2d3e47ce-b689-4ecd-80f6-23cce6d416f6\") " pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.839749 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-322a-account-create-hn7b9"] Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.841043 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.846133 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.858151 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sft8\" (UniqueName: \"kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8\") pod \"barbican-0133-account-create-jdvz5\" (UID: \"2d3e47ce-b689-4ecd-80f6-23cce6d416f6\") " pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.870126 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-322a-account-create-hn7b9"] Oct 09 09:20:05 crc kubenswrapper[4710]: I1009 09:20:05.938767 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg6h5\" (UniqueName: \"kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5\") pod \"cinder-322a-account-create-hn7b9\" (UID: \"6a894075-d541-491f-8041-f8007027c7b3\") " pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.027324 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-b5b1-account-create-8776d"] Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.029073 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.037621 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.039817 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.043035 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbx4h\" (UniqueName: \"kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h\") pod \"neutron-b5b1-account-create-8776d\" (UID: \"6f8a6e70-3050-4bd5-bf95-23c8c32a8add\") " pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.043415 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg6h5\" (UniqueName: \"kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5\") pod \"cinder-322a-account-create-hn7b9\" (UID: \"6a894075-d541-491f-8041-f8007027c7b3\") " pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.049729 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b5b1-account-create-8776d"] Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.082474 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b" exitCode=0 Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.082736 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b"} Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.082782 4710 scope.go:117] "RemoveContainer" containerID="4d64191b75158ae2723b09865a9bd6d7523a53aa9da415f79edde49e77da405d" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.087709 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg6h5\" (UniqueName: \"kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5\") pod \"cinder-322a-account-create-hn7b9\" (UID: \"6a894075-d541-491f-8041-f8007027c7b3\") " pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.149240 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbx4h\" (UniqueName: \"kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h\") pod \"neutron-b5b1-account-create-8776d\" (UID: \"6f8a6e70-3050-4bd5-bf95-23c8c32a8add\") " pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.169171 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbx4h\" (UniqueName: \"kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h\") pod \"neutron-b5b1-account-create-8776d\" (UID: \"6f8a6e70-3050-4bd5-bf95-23c8c32a8add\") " pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.187284 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:06 crc kubenswrapper[4710]: I1009 09:20:06.346473 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.823915 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-m22wk" Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.931081 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle\") pod \"233cf622-10b1-47bb-acf6-12f79fb6ea66\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.931297 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r262x\" (UniqueName: \"kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x\") pod \"233cf622-10b1-47bb-acf6-12f79fb6ea66\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.931423 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data\") pod \"233cf622-10b1-47bb-acf6-12f79fb6ea66\" (UID: \"233cf622-10b1-47bb-acf6-12f79fb6ea66\") " Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.938944 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x" (OuterVolumeSpecName: "kube-api-access-r262x") pod "233cf622-10b1-47bb-acf6-12f79fb6ea66" (UID: "233cf622-10b1-47bb-acf6-12f79fb6ea66"). InnerVolumeSpecName "kube-api-access-r262x". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.968084 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "233cf622-10b1-47bb-acf6-12f79fb6ea66" (UID: "233cf622-10b1-47bb-acf6-12f79fb6ea66"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:09 crc kubenswrapper[4710]: I1009 09:20:09.986391 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data" (OuterVolumeSpecName: "config-data") pod "233cf622-10b1-47bb-acf6-12f79fb6ea66" (UID: "233cf622-10b1-47bb-acf6-12f79fb6ea66"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.033669 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r262x\" (UniqueName: \"kubernetes.io/projected/233cf622-10b1-47bb-acf6-12f79fb6ea66-kube-api-access-r262x\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.033897 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.033908 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233cf622-10b1-47bb-acf6-12f79fb6ea66-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.147043 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-b5b1-account-create-8776d"] Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.162220 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-322a-account-create-hn7b9"] Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.179510 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-m22wk" event={"ID":"233cf622-10b1-47bb-acf6-12f79fb6ea66","Type":"ContainerDied","Data":"900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6"} Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.179560 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="900f0ece258e9ae8fbed6819f60afe389e7b17440594135bce9eac19139015a6" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.179671 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-m22wk" Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.187125 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a"} Oct 09 09:20:10 crc kubenswrapper[4710]: I1009 09:20:10.224786 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0133-account-create-jdvz5"] Oct 09 09:20:10 crc kubenswrapper[4710]: W1009 09:20:10.231961 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d3e47ce_b689_4ecd_80f6_23cce6d416f6.slice/crio-4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f WatchSource:0}: Error finding container 4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f: Status 404 returned error can't find the container with id 4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.065209 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:11 crc kubenswrapper[4710]: E1009 09:20:11.066262 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233cf622-10b1-47bb-acf6-12f79fb6ea66" containerName="keystone-db-sync" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.066281 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="233cf622-10b1-47bb-acf6-12f79fb6ea66" containerName="keystone-db-sync" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.066497 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="233cf622-10b1-47bb-acf6-12f79fb6ea66" containerName="keystone-db-sync" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.067498 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.105994 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dqvzw"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.107158 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.110070 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.110297 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.110445 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.110682 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jdrf2" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.126260 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165517 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165603 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kqs6\" (UniqueName: \"kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165674 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165721 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165746 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165785 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165880 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snx6b\" (UniqueName: 
\"kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165913 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165949 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.165985 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.166009 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.179188 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dqvzw"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.201423 4710 generic.go:334] "Generic (PLEG): container finished" podID="6f8a6e70-3050-4bd5-bf95-23c8c32a8add" containerID="0570f356b61f41d1164ac3d6a0a7b018fbdc3febdf59bd4baefc85e0da8ab482" exitCode=0 Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.201541 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b5b1-account-create-8776d" event={"ID":"6f8a6e70-3050-4bd5-bf95-23c8c32a8add","Type":"ContainerDied","Data":"0570f356b61f41d1164ac3d6a0a7b018fbdc3febdf59bd4baefc85e0da8ab482"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.201568 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b5b1-account-create-8776d" event={"ID":"6f8a6e70-3050-4bd5-bf95-23c8c32a8add","Type":"ContainerStarted","Data":"4b4d42c0fe85c9aff547716b746d194827f15b14784a6eae21dccee3cb4b5f6c"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.202906 4710 generic.go:334] "Generic (PLEG): container finished" podID="6a894075-d541-491f-8041-f8007027c7b3" containerID="ffc5b24229ebec1b23d9f028d75ccc5c3a026f21f005c3127f24227c1323dcde" exitCode=0 Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.203001 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-322a-account-create-hn7b9" event={"ID":"6a894075-d541-491f-8041-f8007027c7b3","Type":"ContainerDied","Data":"ffc5b24229ebec1b23d9f028d75ccc5c3a026f21f005c3127f24227c1323dcde"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.203057 4710 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-322a-account-create-hn7b9" event={"ID":"6a894075-d541-491f-8041-f8007027c7b3","Type":"ContainerStarted","Data":"85d58818f9416618f16e5f9add84038996398c673b2acbf8d4459abe498f815f"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.204370 4710 generic.go:334] "Generic (PLEG): container finished" podID="2d3e47ce-b689-4ecd-80f6-23cce6d416f6" containerID="68beddf809561a9376a5f6aacada0c20303dd041a88fd11bbfc81c76b6acd473" exitCode=0 Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.204417 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0133-account-create-jdvz5" event={"ID":"2d3e47ce-b689-4ecd-80f6-23cce6d416f6","Type":"ContainerDied","Data":"68beddf809561a9376a5f6aacada0c20303dd041a88fd11bbfc81c76b6acd473"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.204446 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0133-account-create-jdvz5" event={"ID":"2d3e47ce-b689-4ecd-80f6-23cce6d416f6","Type":"ContainerStarted","Data":"4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.208147 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5pkvd" event={"ID":"b43105bc-a07d-402a-bef9-d4946f2827c3","Type":"ContainerStarted","Data":"71ff411d1e1d3d0ed55a133fb52951d8ec254dbefcec84a98a3a8e0a757f1e1c"} Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270138 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snx6b\" (UniqueName: \"kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270228 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270262 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270282 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270300 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270375 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270409 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kqs6\" (UniqueName: \"kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270478 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270517 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270543 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.270565 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.272915 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.274400 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.282916 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.283359 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " 
pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.290324 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.290865 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.295656 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.302890 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.326058 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-5pkvd" podStartSLOduration=2.448747162 podStartE2EDuration="17.326035637s" podCreationTimestamp="2025-10-09 09:19:54 +0000 UTC" firstStartedPulling="2025-10-09 09:19:54.830282008 +0000 UTC m=+918.320390405" lastFinishedPulling="2025-10-09 09:20:09.707570483 +0000 UTC m=+933.197678880" observedRunningTime="2025-10-09 09:20:11.302894432 +0000 UTC m=+934.793002829" watchObservedRunningTime="2025-10-09 09:20:11.326035637 +0000 UTC m=+934.816144034" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.326741 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.327364 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snx6b\" (UniqueName: \"kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b\") pod \"dnsmasq-dns-5dcb7bb4dc-ffnh8\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.332804 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kqs6\" (UniqueName: \"kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6\") pod \"keystone-bootstrap-dqvzw\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.383829 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.384820 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.387007 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.402468 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.402707 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.413016 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.433857 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.472992 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473208 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473299 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473401 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473516 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473587 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.473670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn6xx\" (UniqueName: \"kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 
09:20:11.483752 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.513022 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jnpq9"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.514288 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.520030 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.520211 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v52c5" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.520811 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.529665 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jnpq9"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.550045 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.551607 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.580223 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585093 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585152 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585211 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585260 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585282 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585311 
4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585332 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585353 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585371 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585384 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585414 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm7pw\" (UniqueName: \"kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.585442 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn6xx\" (UniqueName: \"kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.587092 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.589040 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.605773 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 
09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.615296 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.620314 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.620977 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.631955 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn6xx\" (UniqueName: \"kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx\") pod \"ceilometer-0\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.693994 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694076 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694108 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694131 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694160 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694191 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm7pw\" (UniqueName: \"kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw\") pod 
\"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694224 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694241 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t495k\" (UniqueName: \"kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694257 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.694282 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.703664 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.703901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.704946 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.724889 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.724996 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm7pw\" (UniqueName: \"kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.743640 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data\") pod \"placement-db-sync-jnpq9\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.796672 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.796720 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t495k\" (UniqueName: \"kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.796760 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.796812 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.796843 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.799734 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.800095 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.802390 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.804324 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.831365 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t495k\" (UniqueName: \"kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k\") pod \"dnsmasq-dns-7f549f7b99-6bx4v\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.850918 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:11 crc kubenswrapper[4710]: I1009 09:20:11.913125 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.180367 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.240057 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" event={"ID":"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701","Type":"ContainerStarted","Data":"e09781799d5f137f5c50a6e6879d130f5f2e09258a030209108511ee53732117"} Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.345325 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dqvzw"] Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.485225 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.503404 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jnpq9"] Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.627787 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.901841 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.905136 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:12 crc kubenswrapper[4710]: I1009 09:20:12.920238 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.044354 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbx4h\" (UniqueName: \"kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h\") pod \"6f8a6e70-3050-4bd5-bf95-23c8c32a8add\" (UID: \"6f8a6e70-3050-4bd5-bf95-23c8c32a8add\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.044785 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sft8\" (UniqueName: \"kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8\") pod \"2d3e47ce-b689-4ecd-80f6-23cce6d416f6\" (UID: \"2d3e47ce-b689-4ecd-80f6-23cce6d416f6\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.044915 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg6h5\" (UniqueName: \"kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5\") pod \"6a894075-d541-491f-8041-f8007027c7b3\" (UID: \"6a894075-d541-491f-8041-f8007027c7b3\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.051090 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h" (OuterVolumeSpecName: "kube-api-access-lbx4h") pod "6f8a6e70-3050-4bd5-bf95-23c8c32a8add" (UID: "6f8a6e70-3050-4bd5-bf95-23c8c32a8add"). InnerVolumeSpecName "kube-api-access-lbx4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.053787 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8" (OuterVolumeSpecName: "kube-api-access-6sft8") pod "2d3e47ce-b689-4ecd-80f6-23cce6d416f6" (UID: "2d3e47ce-b689-4ecd-80f6-23cce6d416f6"). InnerVolumeSpecName "kube-api-access-6sft8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.061638 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5" (OuterVolumeSpecName: "kube-api-access-hg6h5") pod "6a894075-d541-491f-8041-f8007027c7b3" (UID: "6a894075-d541-491f-8041-f8007027c7b3"). InnerVolumeSpecName "kube-api-access-hg6h5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.148149 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg6h5\" (UniqueName: \"kubernetes.io/projected/6a894075-d541-491f-8041-f8007027c7b3-kube-api-access-hg6h5\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.148499 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbx4h\" (UniqueName: \"kubernetes.io/projected/6f8a6e70-3050-4bd5-bf95-23c8c32a8add-kube-api-access-lbx4h\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.148587 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sft8\" (UniqueName: \"kubernetes.io/projected/2d3e47ce-b689-4ecd-80f6-23cce6d416f6-kube-api-access-6sft8\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.272781 4710 generic.go:334] "Generic (PLEG): container finished" podID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerID="0751473b3b0f75d39e44c7f266d286aee38e49222e5d6e405643b5c36ac12c45" exitCode=0 Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.272863 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" event={"ID":"b964db51-b1b4-4328-8134-3e4adf82dd59","Type":"ContainerDied","Data":"0751473b3b0f75d39e44c7f266d286aee38e49222e5d6e405643b5c36ac12c45"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.272895 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" event={"ID":"b964db51-b1b4-4328-8134-3e4adf82dd59","Type":"ContainerStarted","Data":"5483b67b71dc0184a6aa0a6f3478671dbaa3f91c5a417aa5041bb6106d9abe1e"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.307666 4710 generic.go:334] "Generic (PLEG): container finished" podID="8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" containerID="d01c4bd6e51c3998ca00a3f72325d74a4903d0f1ac6ea7bd6823424f4edc2b7a" exitCode=0 Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.307749 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" event={"ID":"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701","Type":"ContainerDied","Data":"d01c4bd6e51c3998ca00a3f72325d74a4903d0f1ac6ea7bd6823424f4edc2b7a"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.327563 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-b5b1-account-create-8776d" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.327589 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-b5b1-account-create-8776d" event={"ID":"6f8a6e70-3050-4bd5-bf95-23c8c32a8add","Type":"ContainerDied","Data":"4b4d42c0fe85c9aff547716b746d194827f15b14784a6eae21dccee3cb4b5f6c"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.327636 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b4d42c0fe85c9aff547716b746d194827f15b14784a6eae21dccee3cb4b5f6c" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.330181 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-322a-account-create-hn7b9" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.330319 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-322a-account-create-hn7b9" event={"ID":"6a894075-d541-491f-8041-f8007027c7b3","Type":"ContainerDied","Data":"85d58818f9416618f16e5f9add84038996398c673b2acbf8d4459abe498f815f"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.330401 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85d58818f9416618f16e5f9add84038996398c673b2acbf8d4459abe498f815f" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.339998 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerStarted","Data":"5f75592491efc617f54556bd7da0ef42211f0aa280caf323d01ebfef663fdcbc"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.356030 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0133-account-create-jdvz5" event={"ID":"2d3e47ce-b689-4ecd-80f6-23cce6d416f6","Type":"ContainerDied","Data":"4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.356067 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4839e1f3a04799fca2baa462277fd7c893856afe86c0b40f892e95f1d19ee21f" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.356127 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0133-account-create-jdvz5" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.361024 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.372228 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqvzw" event={"ID":"9223e283-9e4c-4e89-bfb8-d0eda7796b61","Type":"ContainerStarted","Data":"7e8d759790222815df192950d1161c0b963485de496ebb8f8ff14a7af3befb8e"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.372341 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqvzw" event={"ID":"9223e283-9e4c-4e89-bfb8-d0eda7796b61","Type":"ContainerStarted","Data":"845d36171313fb10476c03c920bac71b6c540a37786e14620085b24247485dbd"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.378746 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jnpq9" event={"ID":"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d","Type":"ContainerStarted","Data":"78883747ec1495e9338c72f829da3542b0ede207910abd0d05514409dc1947c5"} Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.405607 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dqvzw" podStartSLOduration=2.405594243 podStartE2EDuration="2.405594243s" podCreationTimestamp="2025-10-09 09:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:13.401216808 +0000 UTC m=+936.891325196" watchObservedRunningTime="2025-10-09 09:20:13.405594243 +0000 UTC m=+936.895702630" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.561447 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.681376 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snx6b\" (UniqueName: \"kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b\") pod \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.681420 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc\") pod \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.681510 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config\") pod \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.681536 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb\") pod \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.681562 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb\") pod \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\" (UID: \"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701\") " Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.692732 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b" (OuterVolumeSpecName: "kube-api-access-snx6b") pod "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" (UID: "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701"). InnerVolumeSpecName "kube-api-access-snx6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.707039 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" (UID: "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.711747 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" (UID: "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.714257 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" (UID: "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.714495 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config" (OuterVolumeSpecName: "config") pod "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" (UID: "8d7ba2e8-c662-4342-b5f9-c56fb6d7f701"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.785580 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snx6b\" (UniqueName: \"kubernetes.io/projected/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-kube-api-access-snx6b\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.785648 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.785669 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.785683 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:13 crc kubenswrapper[4710]: I1009 09:20:13.785693 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.404079 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" event={"ID":"b964db51-b1b4-4328-8134-3e4adf82dd59","Type":"ContainerStarted","Data":"cb200e797aebf6edf00497670b39540bb193adbc4ae78b4c6fcb0b365698928c"} Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.404440 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.410649 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.411027 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8" event={"ID":"8d7ba2e8-c662-4342-b5f9-c56fb6d7f701","Type":"ContainerDied","Data":"e09781799d5f137f5c50a6e6879d130f5f2e09258a030209108511ee53732117"} Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.411128 4710 scope.go:117] "RemoveContainer" containerID="d01c4bd6e51c3998ca00a3f72325d74a4903d0f1ac6ea7bd6823424f4edc2b7a" Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.427934 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" podStartSLOduration=3.4279105420000002 podStartE2EDuration="3.427910542s" podCreationTimestamp="2025-10-09 09:20:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:14.420063635 +0000 UTC m=+937.910172032" watchObservedRunningTime="2025-10-09 09:20:14.427910542 +0000 UTC m=+937.918018939" Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.576618 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.586415 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5dcb7bb4dc-ffnh8"] Oct 09 09:20:14 crc kubenswrapper[4710]: I1009 09:20:14.825055 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" path="/var/lib/kubelet/pods/8d7ba2e8-c662-4342-b5f9-c56fb6d7f701/volumes" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.109932 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-g5glz"] Oct 09 09:20:16 crc kubenswrapper[4710]: E1009 09:20:16.110535 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f8a6e70-3050-4bd5-bf95-23c8c32a8add" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110551 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f8a6e70-3050-4bd5-bf95-23c8c32a8add" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: E1009 09:20:16.110570 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3e47ce-b689-4ecd-80f6-23cce6d416f6" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110575 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3e47ce-b689-4ecd-80f6-23cce6d416f6" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: E1009 09:20:16.110589 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a894075-d541-491f-8041-f8007027c7b3" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110595 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a894075-d541-491f-8041-f8007027c7b3" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: E1009 09:20:16.110605 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" containerName="init" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110610 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" containerName="init" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110768 4710 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2d3e47ce-b689-4ecd-80f6-23cce6d416f6" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110783 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7ba2e8-c662-4342-b5f9-c56fb6d7f701" containerName="init" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110798 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a894075-d541-491f-8041-f8007027c7b3" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.110808 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f8a6e70-3050-4bd5-bf95-23c8c32a8add" containerName="mariadb-account-create" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.111313 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.113761 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vqrvr" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.113967 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.125116 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-g5glz"] Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.231674 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-rmqx2"] Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.233857 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.242727 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zkcnb" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.242862 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.245871 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.251021 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rmqx2"] Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.268197 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.269295 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.269374 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c9ft\" (UniqueName: \"kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft\") pod \"barbican-db-sync-g5glz\" (UID: 
\"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.269589 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370730 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370855 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370879 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370899 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370922 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t9mg\" (UniqueName: \"kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.370964 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c9ft\" (UniqueName: \"kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.371045 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.371151 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " 
pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.371271 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.371334 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.379077 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.380609 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.387401 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c9ft\" (UniqueName: \"kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft\") pod \"barbican-db-sync-g5glz\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.410015 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6fx6n"] Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.411366 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.414609 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.415699 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-p6jp2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.417606 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.418619 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6fx6n"] Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.433167 4710 generic.go:334] "Generic (PLEG): container finished" podID="9223e283-9e4c-4e89-bfb8-d0eda7796b61" containerID="7e8d759790222815df192950d1161c0b963485de496ebb8f8ff14a7af3befb8e" exitCode=0 Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.433218 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqvzw" event={"ID":"9223e283-9e4c-4e89-bfb8-d0eda7796b61","Type":"ContainerDied","Data":"7e8d759790222815df192950d1161c0b963485de496ebb8f8ff14a7af3befb8e"} Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.459941 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473205 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473278 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473307 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgbrq\" (UniqueName: \"kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473329 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473361 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473393 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473426 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.473461 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t9mg\" (UniqueName: \"kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.477881 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.478329 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.479852 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.492397 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t9mg\" (UniqueName: \"kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.492944 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts\") pod \"cinder-db-sync-rmqx2\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.555909 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.575008 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgbrq\" (UniqueName: \"kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.575211 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.575327 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.580886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.582777 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.593994 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgbrq\" (UniqueName: \"kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq\") pod \"neutron-db-sync-6fx6n\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:16 crc kubenswrapper[4710]: I1009 09:20:16.769370 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:17 crc kubenswrapper[4710]: I1009 09:20:17.444641 4710 generic.go:334] "Generic (PLEG): container finished" podID="b43105bc-a07d-402a-bef9-d4946f2827c3" containerID="71ff411d1e1d3d0ed55a133fb52951d8ec254dbefcec84a98a3a8e0a757f1e1c" exitCode=0 Oct 09 09:20:17 crc kubenswrapper[4710]: I1009 09:20:17.444726 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5pkvd" event={"ID":"b43105bc-a07d-402a-bef9-d4946f2827c3","Type":"ContainerDied","Data":"71ff411d1e1d3d0ed55a133fb52951d8ec254dbefcec84a98a3a8e0a757f1e1c"} Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.739528 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.764147 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5pkvd" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797227 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797283 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797304 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797384 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797449 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kqs6\" (UniqueName: \"kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797472 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data\") pod \"b43105bc-a07d-402a-bef9-d4946f2827c3\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797496 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle\") pod \"b43105bc-a07d-402a-bef9-d4946f2827c3\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797579 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys\") pod \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\" (UID: \"9223e283-9e4c-4e89-bfb8-d0eda7796b61\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797663 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm9lt\" (UniqueName: \"kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt\") pod \"b43105bc-a07d-402a-bef9-d4946f2827c3\" (UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.797690 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data\") pod \"b43105bc-a07d-402a-bef9-d4946f2827c3\" 
(UID: \"b43105bc-a07d-402a-bef9-d4946f2827c3\") " Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.807910 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6" (OuterVolumeSpecName: "kube-api-access-6kqs6") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "kube-api-access-6kqs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.814993 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts" (OuterVolumeSpecName: "scripts") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.815017 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.815079 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.815504 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt" (OuterVolumeSpecName: "kube-api-access-dm9lt") pod "b43105bc-a07d-402a-bef9-d4946f2827c3" (UID: "b43105bc-a07d-402a-bef9-d4946f2827c3"). InnerVolumeSpecName "kube-api-access-dm9lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.820207 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b43105bc-a07d-402a-bef9-d4946f2827c3" (UID: "b43105bc-a07d-402a-bef9-d4946f2827c3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.850589 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data" (OuterVolumeSpecName: "config-data") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.858129 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b43105bc-a07d-402a-bef9-d4946f2827c3" (UID: "b43105bc-a07d-402a-bef9-d4946f2827c3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.884229 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9223e283-9e4c-4e89-bfb8-d0eda7796b61" (UID: "9223e283-9e4c-4e89-bfb8-d0eda7796b61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.893807 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data" (OuterVolumeSpecName: "config-data") pod "b43105bc-a07d-402a-bef9-d4946f2827c3" (UID: "b43105bc-a07d-402a-bef9-d4946f2827c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898716 4710 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898746 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898756 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898767 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kqs6\" (UniqueName: \"kubernetes.io/projected/9223e283-9e4c-4e89-bfb8-d0eda7796b61-kube-api-access-6kqs6\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898778 4710 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898787 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898795 4710 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898802 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm9lt\" (UniqueName: \"kubernetes.io/projected/b43105bc-a07d-402a-bef9-d4946f2827c3-kube-api-access-dm9lt\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898810 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b43105bc-a07d-402a-bef9-d4946f2827c3-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.898818 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9223e283-9e4c-4e89-bfb8-d0eda7796b61-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.915119 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.974568 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:20:21 crc kubenswrapper[4710]: I1009 09:20:21.975029 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="dnsmasq-dns" containerID="cri-o://5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185" gracePeriod=10 Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.055792 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6fx6n"] Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.139066 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-g5glz"] Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.158466 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rmqx2"] Oct 09 09:20:22 crc kubenswrapper[4710]: W1009 09:20:22.161470 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9e9fec6_00af_46a7_9a1f_a59b6b06969c.slice/crio-95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65 WatchSource:0}: Error finding container 95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65: Status 404 returned error can't find the container with id 95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65 Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.413212 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.487405 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqvzw" event={"ID":"9223e283-9e4c-4e89-bfb8-d0eda7796b61","Type":"ContainerDied","Data":"845d36171313fb10476c03c920bac71b6c540a37786e14620085b24247485dbd"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.487473 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="845d36171313fb10476c03c920bac71b6c540a37786e14620085b24247485dbd" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.487554 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dqvzw" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.494920 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jnpq9" event={"ID":"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d","Type":"ContainerStarted","Data":"4e0274b649334309e28136c5402649371d08cc0a325fc7305119e64daa2b7400"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.501456 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-g5glz" event={"ID":"972b488b-f691-4100-b183-692b2e27f665","Type":"ContainerStarted","Data":"e0d6876c489e07be999d882f98a1d123c4b8bc62e65699b5ca329c396f404c09"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.507239 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-5pkvd" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.507532 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-5pkvd" event={"ID":"b43105bc-a07d-402a-bef9-d4946f2827c3","Type":"ContainerDied","Data":"001e26d2769494423af007d00b24aa9d5d8690960474d862a1d7aecac4c05a61"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.507569 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="001e26d2769494423af007d00b24aa9d5d8690960474d862a1d7aecac4c05a61" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.508385 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rmqx2" event={"ID":"c9e9fec6-00af-46a7-9a1f-a59b6b06969c","Type":"ContainerStarted","Data":"95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.509530 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fx6n" event={"ID":"1a69a4b9-5080-4c08-bae1-86c06523c8d1","Type":"ContainerStarted","Data":"421a3a76cd9f2b97043f74dc6e354541cc3aaeab3dc648ace7265d8049c09dc4"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.509557 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fx6n" event={"ID":"1a69a4b9-5080-4c08-bae1-86c06523c8d1","Type":"ContainerStarted","Data":"abdac2451727f397869f0357b7e2e743208bd19ddd17e1542dc590674d865a51"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.511940 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq\") pod \"56bbc07d-0741-4064-b135-11222d19a27f\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.512011 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc\") pod \"56bbc07d-0741-4064-b135-11222d19a27f\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.512124 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb\") pod \"56bbc07d-0741-4064-b135-11222d19a27f\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.512164 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config\") pod \"56bbc07d-0741-4064-b135-11222d19a27f\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.512260 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb\") pod \"56bbc07d-0741-4064-b135-11222d19a27f\" (UID: \"56bbc07d-0741-4064-b135-11222d19a27f\") " Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.536595 4710 generic.go:334] "Generic (PLEG): container finished" podID="56bbc07d-0741-4064-b135-11222d19a27f" containerID="5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185" exitCode=0 Oct 09 09:20:22 crc 
kubenswrapper[4710]: I1009 09:20:22.536764 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" event={"ID":"56bbc07d-0741-4064-b135-11222d19a27f","Type":"ContainerDied","Data":"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.536808 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" event={"ID":"56bbc07d-0741-4064-b135-11222d19a27f","Type":"ContainerDied","Data":"0e554b021473825c3434485fdb6749b919cbca460f34eb0033129cf8681b1be2"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.536831 4710 scope.go:117] "RemoveContainer" containerID="5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.537007 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bc45f6dcf-8l6f9" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.547099 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq" (OuterVolumeSpecName: "kube-api-access-kr2cq") pod "56bbc07d-0741-4064-b135-11222d19a27f" (UID: "56bbc07d-0741-4064-b135-11222d19a27f"). InnerVolumeSpecName "kube-api-access-kr2cq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.552745 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerStarted","Data":"acc54a4a3f2f93c817365f7d6b06460895f262559e67eb200ec7ffbcc2b25926"} Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.558804 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jnpq9" podStartSLOduration=2.489964934 podStartE2EDuration="11.558777021s" podCreationTimestamp="2025-10-09 09:20:11 +0000 UTC" firstStartedPulling="2025-10-09 09:20:12.538538333 +0000 UTC m=+936.028646721" lastFinishedPulling="2025-10-09 09:20:21.607350411 +0000 UTC m=+945.097458808" observedRunningTime="2025-10-09 09:20:22.518086864 +0000 UTC m=+946.008195252" watchObservedRunningTime="2025-10-09 09:20:22.558777021 +0000 UTC m=+946.048885418" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.567346 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6fx6n" podStartSLOduration=6.567310361 podStartE2EDuration="6.567310361s" podCreationTimestamp="2025-10-09 09:20:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:22.551091178 +0000 UTC m=+946.041199585" watchObservedRunningTime="2025-10-09 09:20:22.567310361 +0000 UTC m=+946.057418759" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.571128 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "56bbc07d-0741-4064-b135-11222d19a27f" (UID: "56bbc07d-0741-4064-b135-11222d19a27f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.581266 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "56bbc07d-0741-4064-b135-11222d19a27f" (UID: "56bbc07d-0741-4064-b135-11222d19a27f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.615645 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr2cq\" (UniqueName: \"kubernetes.io/projected/56bbc07d-0741-4064-b135-11222d19a27f-kube-api-access-kr2cq\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.615683 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.615697 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.632360 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config" (OuterVolumeSpecName: "config") pod "56bbc07d-0741-4064-b135-11222d19a27f" (UID: "56bbc07d-0741-4064-b135-11222d19a27f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.658737 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "56bbc07d-0741-4064-b135-11222d19a27f" (UID: "56bbc07d-0741-4064-b135-11222d19a27f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.670884 4710 scope.go:117] "RemoveContainer" containerID="4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.706631 4710 scope.go:117] "RemoveContainer" containerID="5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185" Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.713086 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185\": container with ID starting with 5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185 not found: ID does not exist" containerID="5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.713147 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185"} err="failed to get container status \"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185\": rpc error: code = NotFound desc = could not find container \"5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185\": container with ID starting with 5c6aeedd69b3918fa9d76b3a8e01ae873664a8fb1a69071f0f2d04b28b413185 not found: ID does not exist" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.713194 4710 scope.go:117] "RemoveContainer" containerID="4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63" Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.713980 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63\": container with ID starting with 4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63 not found: ID does not exist" containerID="4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.714041 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63"} err="failed to get container status \"4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63\": rpc error: code = NotFound desc = could not find container \"4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63\": container with ID starting with 4679bba872472372fd50565236d1911daee969cad9d532ab3622f4efdc3ccd63 not found: ID does not exist" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.718288 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.718322 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/56bbc07d-0741-4064-b135-11222d19a27f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.854465 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dqvzw"] Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.862782 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dqvzw"] Oct 
09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.884166 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.896195 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bc45f6dcf-8l6f9"] Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.948097 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-mm2sn"] Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.948847 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="dnsmasq-dns" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.948880 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="dnsmasq-dns" Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.948915 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b43105bc-a07d-402a-bef9-d4946f2827c3" containerName="glance-db-sync" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.948922 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b43105bc-a07d-402a-bef9-d4946f2827c3" containerName="glance-db-sync" Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.948955 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9223e283-9e4c-4e89-bfb8-d0eda7796b61" containerName="keystone-bootstrap" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.948964 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9223e283-9e4c-4e89-bfb8-d0eda7796b61" containerName="keystone-bootstrap" Oct 09 09:20:22 crc kubenswrapper[4710]: E1009 09:20:22.948988 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="init" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.948995 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="init" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.949320 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9223e283-9e4c-4e89-bfb8-d0eda7796b61" containerName="keystone-bootstrap" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.949354 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="56bbc07d-0741-4064-b135-11222d19a27f" containerName="dnsmasq-dns" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.949364 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b43105bc-a07d-402a-bef9-d4946f2827c3" containerName="glance-db-sync" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.950970 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.960640 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.960917 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.961342 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 09:20:22 crc kubenswrapper[4710]: I1009 09:20:22.969555 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jdrf2" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.009933 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mm2sn"] Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133539 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133691 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133780 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133854 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133912 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc6nx\" (UniqueName: \"kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.133974 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236140 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts\") pod \"keystone-bootstrap-mm2sn\" (UID: 
\"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236351 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236413 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236525 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236830 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc6nx\" (UniqueName: \"kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.236891 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.274567 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.274707 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.275271 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.276456 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.277535 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc6nx\" (UniqueName: \"kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.284176 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys\") pod \"keystone-bootstrap-mm2sn\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.310352 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.379423 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.390082 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.433825 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.543897 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.544274 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4sbb\" (UniqueName: \"kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.544390 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.544505 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.544648 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.646684 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.646853 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.646937 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.647020 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4sbb\" (UniqueName: \"kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.647077 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.648351 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.648371 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.648910 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.648939 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc\") pod \"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.698459 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4sbb\" (UniqueName: \"kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb\") pod 
\"dnsmasq-dns-748d7644cf-5dqfk\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:23 crc kubenswrapper[4710]: I1009 09:20:23.718401 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.016017 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-mm2sn"] Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.295872 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:20:24 crc kubenswrapper[4710]: W1009 09:20:24.309485 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0181c67b_615b_4354_b053_8effb7ab7fab.slice/crio-fa014dc060821784ed6b82bb797781e5d858c5e43d287bc38a339477eddfdd62 WatchSource:0}: Error finding container fa014dc060821784ed6b82bb797781e5d858c5e43d287bc38a339477eddfdd62: Status 404 returned error can't find the container with id fa014dc060821784ed6b82bb797781e5d858c5e43d287bc38a339477eddfdd62 Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.605755 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerStarted","Data":"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593"} Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.605806 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerStarted","Data":"fa014dc060821784ed6b82bb797781e5d858c5e43d287bc38a339477eddfdd62"} Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.611914 4710 generic.go:334] "Generic (PLEG): container finished" podID="dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" containerID="4e0274b649334309e28136c5402649371d08cc0a325fc7305119e64daa2b7400" exitCode=0 Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.611976 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jnpq9" event={"ID":"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d","Type":"ContainerDied","Data":"4e0274b649334309e28136c5402649371d08cc0a325fc7305119e64daa2b7400"} Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.613336 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mm2sn" event={"ID":"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e","Type":"ContainerStarted","Data":"c012d9cb94b56b59689abe9e4a17f95d8ca3aae9229bacd28068cd2469260128"} Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.613366 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mm2sn" event={"ID":"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e","Type":"ContainerStarted","Data":"e993cb0fc5e064c269ed13dcc841316e1afe088f79bfa10de62cca3f97b546a9"} Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.664264 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-mm2sn" podStartSLOduration=2.664236415 podStartE2EDuration="2.664236415s" podCreationTimestamp="2025-10-09 09:20:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:24.657908313 +0000 UTC m=+948.148016710" watchObservedRunningTime="2025-10-09 09:20:24.664236415 +0000 UTC 
m=+948.154344812" Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.826558 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56bbc07d-0741-4064-b135-11222d19a27f" path="/var/lib/kubelet/pods/56bbc07d-0741-4064-b135-11222d19a27f/volumes" Oct 09 09:20:24 crc kubenswrapper[4710]: I1009 09:20:24.827877 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9223e283-9e4c-4e89-bfb8-d0eda7796b61" path="/var/lib/kubelet/pods/9223e283-9e4c-4e89-bfb8-d0eda7796b61/volumes" Oct 09 09:20:25 crc kubenswrapper[4710]: I1009 09:20:25.645493 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerStarted","Data":"3fa7718185235f8cbfbd51f1953b57efb9f226c640f87ac4e069445cc1143fe3"} Oct 09 09:20:25 crc kubenswrapper[4710]: I1009 09:20:25.652321 4710 generic.go:334] "Generic (PLEG): container finished" podID="0181c67b-615b-4354-b053-8effb7ab7fab" containerID="f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593" exitCode=0 Oct 09 09:20:25 crc kubenswrapper[4710]: I1009 09:20:25.652568 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerDied","Data":"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593"} Oct 09 09:20:25 crc kubenswrapper[4710]: I1009 09:20:25.972687 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.109764 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data\") pod \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.109898 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs\") pod \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.109933 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle\") pod \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.110018 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm7pw\" (UniqueName: \"kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw\") pod \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.110053 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts\") pod \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\" (UID: \"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d\") " Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.111359 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs" (OuterVolumeSpecName: "logs") pod 
"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" (UID: "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.118708 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts" (OuterVolumeSpecName: "scripts") pod "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" (UID: "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.130541 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw" (OuterVolumeSpecName: "kube-api-access-dm7pw") pod "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" (UID: "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d"). InnerVolumeSpecName "kube-api-access-dm7pw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.149551 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" (UID: "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.156129 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data" (OuterVolumeSpecName: "config-data") pod "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" (UID: "dc5ec23c-a515-4aa1-8a89-db7c503d4c9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.212097 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm7pw\" (UniqueName: \"kubernetes.io/projected/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-kube-api-access-dm7pw\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.212123 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.212132 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.212143 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.212151 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.704675 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerStarted","Data":"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d"} Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.704985 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.715992 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jnpq9" event={"ID":"dc5ec23c-a515-4aa1-8a89-db7c503d4c9d","Type":"ContainerDied","Data":"78883747ec1495e9338c72f829da3542b0ede207910abd0d05514409dc1947c5"} Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.716037 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78883747ec1495e9338c72f829da3542b0ede207910abd0d05514409dc1947c5" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.716095 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jnpq9" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.734410 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" podStartSLOduration=3.734387601 podStartE2EDuration="3.734387601s" podCreationTimestamp="2025-10-09 09:20:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:26.723690291 +0000 UTC m=+950.213798688" watchObservedRunningTime="2025-10-09 09:20:26.734387601 +0000 UTC m=+950.224495998" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.789345 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-685c5ffc88-49dzx"] Oct 09 09:20:26 crc kubenswrapper[4710]: E1009 09:20:26.789711 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" containerName="placement-db-sync" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.789734 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" containerName="placement-db-sync" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.789897 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" containerName="placement-db-sync" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.793498 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.802153 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.802198 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.802515 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v52c5" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.802550 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.802814 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.845386 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-685c5ffc88-49dzx"] Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.934688 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-scripts\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.934762 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-public-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.934802 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-internal-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.934839 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a038f2bb-2364-4a8d-918c-a0776dfa8458-logs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.934960 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-combined-ca-bundle\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.935016 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-456br\" (UniqueName: \"kubernetes.io/projected/a038f2bb-2364-4a8d-918c-a0776dfa8458-kube-api-access-456br\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:26 crc kubenswrapper[4710]: I1009 09:20:26.935033 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-config-data\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036541 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-scripts\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036592 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-public-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036625 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-internal-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036649 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a038f2bb-2364-4a8d-918c-a0776dfa8458-logs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036700 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-combined-ca-bundle\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036749 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-456br\" (UniqueName: \"kubernetes.io/projected/a038f2bb-2364-4a8d-918c-a0776dfa8458-kube-api-access-456br\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.036764 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-config-data\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.040584 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a038f2bb-2364-4a8d-918c-a0776dfa8458-logs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.045932 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-combined-ca-bundle\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.062259 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-scripts\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.062612 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-config-data\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.062832 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-456br\" (UniqueName: \"kubernetes.io/projected/a038f2bb-2364-4a8d-918c-a0776dfa8458-kube-api-access-456br\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.065835 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-public-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: \"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.067924 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a038f2bb-2364-4a8d-918c-a0776dfa8458-internal-tls-certs\") pod \"placement-685c5ffc88-49dzx\" (UID: 
\"a038f2bb-2364-4a8d-918c-a0776dfa8458\") " pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:27 crc kubenswrapper[4710]: I1009 09:20:27.118891 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:28 crc kubenswrapper[4710]: I1009 09:20:28.743197 4710 generic.go:334] "Generic (PLEG): container finished" podID="80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" containerID="c012d9cb94b56b59689abe9e4a17f95d8ca3aae9229bacd28068cd2469260128" exitCode=0 Oct 09 09:20:28 crc kubenswrapper[4710]: I1009 09:20:28.743357 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mm2sn" event={"ID":"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e","Type":"ContainerDied","Data":"c012d9cb94b56b59689abe9e4a17f95d8ca3aae9229bacd28068cd2469260128"} Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.025654 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.128739 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.128986 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.129040 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.129322 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.130144 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.130211 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc6nx\" (UniqueName: \"kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx\") pod \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\" (UID: \"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e\") " Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.137767 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.139459 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts" (OuterVolumeSpecName: "scripts") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.156531 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.156727 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx" (OuterVolumeSpecName: "kube-api-access-bc6nx") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "kube-api-access-bc6nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.163774 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.167998 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data" (OuterVolumeSpecName: "config-data") pod "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" (UID: "80c435f8-7448-4aa8-bc8b-f40bcca2ff1e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233050 4710 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233082 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233092 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233104 4710 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233113 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc6nx\" (UniqueName: \"kubernetes.io/projected/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-kube-api-access-bc6nx\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.233125 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.791106 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-mm2sn" event={"ID":"80c435f8-7448-4aa8-bc8b-f40bcca2ff1e","Type":"ContainerDied","Data":"e993cb0fc5e064c269ed13dcc841316e1afe088f79bfa10de62cca3f97b546a9"} Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.791150 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e993cb0fc5e064c269ed13dcc841316e1afe088f79bfa10de62cca3f97b546a9" Oct 09 09:20:31 crc kubenswrapper[4710]: I1009 09:20:31.791230 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-mm2sn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.111097 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7746794c77-ljwdn"] Oct 09 09:20:32 crc kubenswrapper[4710]: E1009 09:20:32.111560 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" containerName="keystone-bootstrap" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.111580 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" containerName="keystone-bootstrap" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.111837 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" containerName="keystone-bootstrap" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.112526 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.114309 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.114390 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.115894 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.116102 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.118217 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jdrf2" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.120392 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.181075 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7746794c77-ljwdn"] Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.257977 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-scripts\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258027 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-credential-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258170 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-fernet-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258319 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-internal-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258352 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9cfh\" (UniqueName: \"kubernetes.io/projected/aa709644-7781-443c-b0e3-d5936fff1dde-kube-api-access-d9cfh\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258457 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-public-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: 
\"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258501 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-config-data\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.258556 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-combined-ca-bundle\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360064 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-scripts\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360112 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-credential-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360137 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-fernet-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360183 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-internal-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360219 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9cfh\" (UniqueName: \"kubernetes.io/projected/aa709644-7781-443c-b0e3-d5936fff1dde-kube-api-access-d9cfh\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360258 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-public-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360280 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-config-data\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 
09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.360310 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-combined-ca-bundle\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.365717 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-public-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.366257 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-fernet-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.368564 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-scripts\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.368945 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-combined-ca-bundle\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.372929 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-config-data\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.373296 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-credential-keys\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.375389 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa709644-7781-443c-b0e3-d5936fff1dde-internal-tls-certs\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.378272 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9cfh\" (UniqueName: \"kubernetes.io/projected/aa709644-7781-443c-b0e3-d5936fff1dde-kube-api-access-d9cfh\") pod \"keystone-7746794c77-ljwdn\" (UID: \"aa709644-7781-443c-b0e3-d5936fff1dde\") " pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.431590 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.824844 4710 generic.go:334] "Generic (PLEG): container finished" podID="1a69a4b9-5080-4c08-bae1-86c06523c8d1" containerID="421a3a76cd9f2b97043f74dc6e354541cc3aaeab3dc648ace7265d8049c09dc4" exitCode=0 Oct 09 09:20:32 crc kubenswrapper[4710]: I1009 09:20:32.830012 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fx6n" event={"ID":"1a69a4b9-5080-4c08-bae1-86c06523c8d1","Type":"ContainerDied","Data":"421a3a76cd9f2b97043f74dc6e354541cc3aaeab3dc648ace7265d8049c09dc4"} Oct 09 09:20:33 crc kubenswrapper[4710]: I1009 09:20:33.720606 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:20:33 crc kubenswrapper[4710]: I1009 09:20:33.773738 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:33 crc kubenswrapper[4710]: I1009 09:20:33.774022 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" containerID="cri-o://cb200e797aebf6edf00497670b39540bb193adbc4ae78b4c6fcb0b365698928c" gracePeriod=10 Oct 09 09:20:34 crc kubenswrapper[4710]: I1009 09:20:34.845937 4710 generic.go:334] "Generic (PLEG): container finished" podID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerID="cb200e797aebf6edf00497670b39540bb193adbc4ae78b4c6fcb0b365698928c" exitCode=0 Oct 09 09:20:34 crc kubenswrapper[4710]: I1009 09:20:34.846007 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" event={"ID":"b964db51-b1b4-4328-8134-3e4adf82dd59","Type":"ContainerDied","Data":"cb200e797aebf6edf00497670b39540bb193adbc4ae78b4c6fcb0b365698928c"} Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.725013 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.846613 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config\") pod \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.846660 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle\") pod \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.846937 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgbrq\" (UniqueName: \"kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq\") pod \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\" (UID: \"1a69a4b9-5080-4c08-bae1-86c06523c8d1\") " Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.852507 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq" (OuterVolumeSpecName: "kube-api-access-lgbrq") pod "1a69a4b9-5080-4c08-bae1-86c06523c8d1" (UID: "1a69a4b9-5080-4c08-bae1-86c06523c8d1"). InnerVolumeSpecName "kube-api-access-lgbrq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.859392 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fx6n" event={"ID":"1a69a4b9-5080-4c08-bae1-86c06523c8d1","Type":"ContainerDied","Data":"abdac2451727f397869f0357b7e2e743208bd19ddd17e1542dc590674d865a51"} Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.859457 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abdac2451727f397869f0357b7e2e743208bd19ddd17e1542dc590674d865a51" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.859520 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fx6n" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.871644 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config" (OuterVolumeSpecName: "config") pod "1a69a4b9-5080-4c08-bae1-86c06523c8d1" (UID: "1a69a4b9-5080-4c08-bae1-86c06523c8d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.877538 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a69a4b9-5080-4c08-bae1-86c06523c8d1" (UID: "1a69a4b9-5080-4c08-bae1-86c06523c8d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.950636 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgbrq\" (UniqueName: \"kubernetes.io/projected/1a69a4b9-5080-4c08-bae1-86c06523c8d1-kube-api-access-lgbrq\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.950671 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:35 crc kubenswrapper[4710]: I1009 09:20:35.950681 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a69a4b9-5080-4c08-bae1-86c06523c8d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.017867 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:37 crc kubenswrapper[4710]: E1009 09:20:37.033610 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a69a4b9-5080-4c08-bae1-86c06523c8d1" containerName="neutron-db-sync" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.033641 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a69a4b9-5080-4c08-bae1-86c06523c8d1" containerName="neutron-db-sync" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.033924 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a69a4b9-5080-4c08-bae1-86c06523c8d1" containerName="neutron-db-sync" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.034876 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.065040 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.105153 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.105294 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt88v\" (UniqueName: \"kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.105388 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.105493 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.105582 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.207316 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.207374 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.207465 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.207491 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.207580 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt88v\" (UniqueName: \"kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.208365 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.208667 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.208715 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.211568 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.227355 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt88v\" (UniqueName: \"kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v\") pod \"dnsmasq-dns-6d7d647849-8j4tk\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.288558 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.290100 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.295979 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.296149 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-p6jp2" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.296276 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.296533 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.311829 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.312000 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.312035 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.312092 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd9jb\" (UniqueName: \"kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.312171 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.326576 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.384296 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.414691 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.415280 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd9jb\" (UniqueName: \"kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.415304 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.415319 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.415340 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.420948 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.439911 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.441020 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.445417 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd9jb\" (UniqueName: \"kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.453824 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config\") pod \"neutron-655f9d4b56-jw9nr\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:37 crc kubenswrapper[4710]: I1009 09:20:37.615713 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.247598 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-bf8dfcdb5-zd6wv"] Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.248827 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.254615 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.255906 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.352936 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-bf8dfcdb5-zd6wv"] Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.361312 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-ovndb-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.361378 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.361475 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-httpd-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.361516 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-public-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.361545 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-combined-ca-bundle\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.364142 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-internal-tls-certs\") pod 
\"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.364358 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j6n2\" (UniqueName: \"kubernetes.io/projected/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-kube-api-access-6j6n2\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471174 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-ovndb-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471353 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471465 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-httpd-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471530 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-public-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471573 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-combined-ca-bundle\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471777 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-internal-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.471814 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j6n2\" (UniqueName: \"kubernetes.io/projected/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-kube-api-access-6j6n2\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.486479 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j6n2\" (UniqueName: \"kubernetes.io/projected/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-kube-api-access-6j6n2\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " 
pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.486956 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-ovndb-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.487364 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-internal-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.487474 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-public-tls-certs\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.487944 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-httpd-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.490193 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-combined-ca-bundle\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.491533 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ff8c79d6-681e-4c93-b80e-15c8ff06d6af-config\") pod \"neutron-bf8dfcdb5-zd6wv\" (UID: \"ff8c79d6-681e-4c93-b80e-15c8ff06d6af\") " pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:39 crc kubenswrapper[4710]: I1009 09:20:39.570995 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:41 crc kubenswrapper[4710]: I1009 09:20:41.919506 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.135:5353: i/o timeout" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.501697 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.606628 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t495k\" (UniqueName: \"kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k\") pod \"b964db51-b1b4-4328-8134-3e4adf82dd59\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.606961 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config\") pod \"b964db51-b1b4-4328-8134-3e4adf82dd59\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.607012 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc\") pod \"b964db51-b1b4-4328-8134-3e4adf82dd59\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.607058 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb\") pod \"b964db51-b1b4-4328-8134-3e4adf82dd59\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.607205 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb\") pod \"b964db51-b1b4-4328-8134-3e4adf82dd59\" (UID: \"b964db51-b1b4-4328-8134-3e4adf82dd59\") " Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.614399 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k" (OuterVolumeSpecName: "kube-api-access-t495k") pod "b964db51-b1b4-4328-8134-3e4adf82dd59" (UID: "b964db51-b1b4-4328-8134-3e4adf82dd59"). InnerVolumeSpecName "kube-api-access-t495k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.675847 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config" (OuterVolumeSpecName: "config") pod "b964db51-b1b4-4328-8134-3e4adf82dd59" (UID: "b964db51-b1b4-4328-8134-3e4adf82dd59"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.694190 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b964db51-b1b4-4328-8134-3e4adf82dd59" (UID: "b964db51-b1b4-4328-8134-3e4adf82dd59"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.694493 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b964db51-b1b4-4328-8134-3e4adf82dd59" (UID: "b964db51-b1b4-4328-8134-3e4adf82dd59"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.697865 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b964db51-b1b4-4328-8134-3e4adf82dd59" (UID: "b964db51-b1b4-4328-8134-3e4adf82dd59"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.709964 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t495k\" (UniqueName: \"kubernetes.io/projected/b964db51-b1b4-4328-8134-3e4adf82dd59-kube-api-access-t495k\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.709999 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.710009 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.710019 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.710028 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b964db51-b1b4-4328-8134-3e4adf82dd59-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.911035 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-685c5ffc88-49dzx"] Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.967610 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" event={"ID":"b964db51-b1b4-4328-8134-3e4adf82dd59","Type":"ContainerDied","Data":"5483b67b71dc0184a6aa0a6f3478671dbaa3f91c5a417aa5041bb6106d9abe1e"} Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.967713 4710 scope.go:117] "RemoveContainer" containerID="cb200e797aebf6edf00497670b39540bb193adbc4ae78b4c6fcb0b365698928c" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.967670 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" Oct 09 09:20:44 crc kubenswrapper[4710]: I1009 09:20:44.997192 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:45 crc kubenswrapper[4710]: I1009 09:20:45.001710 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f549f7b99-6bx4v"] Oct 09 09:20:45 crc kubenswrapper[4710]: E1009 09:20:45.659668 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f" Oct 09 09:20:45 crc kubenswrapper[4710]: E1009 09:20:45.660154 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8t9mg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-rmqx2_openstack(c9e9fec6-00af-46a7-9a1f-a59b6b06969c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:20:45 crc kubenswrapper[4710]: E1009 09:20:45.662127 4710 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-rmqx2" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" Oct 09 09:20:45 crc kubenswrapper[4710]: W1009 09:20:45.693777 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda038f2bb_2364_4a8d_918c_a0776dfa8458.slice/crio-7960ab386a158dcc17c2147269600852255a3c3953c9da827c4e27b4468e6010 WatchSource:0}: Error finding container 7960ab386a158dcc17c2147269600852255a3c3953c9da827c4e27b4468e6010: Status 404 returned error can't find the container with id 7960ab386a158dcc17c2147269600852255a3c3953c9da827c4e27b4468e6010 Oct 09 09:20:45 crc kubenswrapper[4710]: I1009 09:20:45.717426 4710 scope.go:117] "RemoveContainer" containerID="0751473b3b0f75d39e44c7f266d286aee38e49222e5d6e405643b5c36ac12c45" Oct 09 09:20:45 crc kubenswrapper[4710]: I1009 09:20:45.985194 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-685c5ffc88-49dzx" event={"ID":"a038f2bb-2364-4a8d-918c-a0776dfa8458","Type":"ContainerStarted","Data":"7960ab386a158dcc17c2147269600852255a3c3953c9da827c4e27b4468e6010"} Oct 09 09:20:45 crc kubenswrapper[4710]: I1009 09:20:45.988311 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerStarted","Data":"c3e837075ab7aca4ed7966170ec68b220aa7b909c7b9372b457ca94f1d8fcc34"} Oct 09 09:20:45 crc kubenswrapper[4710]: I1009 09:20:45.989553 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-g5glz" event={"ID":"972b488b-f691-4100-b183-692b2e27f665","Type":"ContainerStarted","Data":"58175b2ee152dc9210861748435624e7594fb216a19cdb4d98ba91e3be1c649d"} Oct 09 09:20:45 crc kubenswrapper[4710]: E1009 09:20:45.995538 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:85c75d60e1bd2f8a9ea0a2bb21a8df64c0a6f7b504cc1a05a355981d4b90e92f\\\"\"" pod="openstack/cinder-db-sync-rmqx2" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.010170 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-g5glz" podStartSLOduration=7.752916751 podStartE2EDuration="30.010157606s" podCreationTimestamp="2025-10-09 09:20:16 +0000 UTC" firstStartedPulling="2025-10-09 09:20:22.155719701 +0000 UTC m=+945.645828098" lastFinishedPulling="2025-10-09 09:20:44.412960556 +0000 UTC m=+967.903068953" observedRunningTime="2025-10-09 09:20:46.005559455 +0000 UTC m=+969.495667853" watchObservedRunningTime="2025-10-09 09:20:46.010157606 +0000 UTC m=+969.500266004" Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.234876 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7746794c77-ljwdn"] Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.297268 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-bf8dfcdb5-zd6wv"] Oct 09 09:20:46 crc kubenswrapper[4710]: W1009 09:20:46.307356 4710 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff8c79d6_681e_4c93_b80e_15c8ff06d6af.slice/crio-f7f8c4661f6d487236243891773911cfd9cb5f2cd98c0c52538a08ca003efb24 WatchSource:0}: Error finding container f7f8c4661f6d487236243891773911cfd9cb5f2cd98c0c52538a08ca003efb24: Status 404 returned error can't find the container with id f7f8c4661f6d487236243891773911cfd9cb5f2cd98c0c52538a08ca003efb24 Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.360731 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.431593 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:20:46 crc kubenswrapper[4710]: W1009 09:20:46.441582 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9a88de3_88a2_4f91_8c5a_cafb299cd9ac.slice/crio-d671a8139c3e7f885b6236bcb926f98cd4c2022ae1b417292960d9de05ff628d WatchSource:0}: Error finding container d671a8139c3e7f885b6236bcb926f98cd4c2022ae1b417292960d9de05ff628d: Status 404 returned error can't find the container with id d671a8139c3e7f885b6236bcb926f98cd4c2022ae1b417292960d9de05ff628d Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.823844 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" path="/var/lib/kubelet/pods/b964db51-b1b4-4328-8134-3e4adf82dd59/volumes" Oct 09 09:20:46 crc kubenswrapper[4710]: I1009 09:20:46.921520 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7f549f7b99-6bx4v" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.135:5353: i/o timeout" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.005969 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7746794c77-ljwdn" event={"ID":"aa709644-7781-443c-b0e3-d5936fff1dde","Type":"ContainerStarted","Data":"495d2dbafc68c7c82a838dc78e2aaaaaeda386b1c4098f7e20a3f927c089f883"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.006320 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.006465 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7746794c77-ljwdn" event={"ID":"aa709644-7781-443c-b0e3-d5936fff1dde","Type":"ContainerStarted","Data":"427112dbd8f8faa1490accd6b5930c628a05df4896098edc315d601aafb1fe26"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.009335 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-685c5ffc88-49dzx" event={"ID":"a038f2bb-2364-4a8d-918c-a0776dfa8458","Type":"ContainerStarted","Data":"6a668ffe6d78078eb3c881cb6bc90af64b4da8b3bd25dd01d2155c2b50e36fc0"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.009812 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-685c5ffc88-49dzx" event={"ID":"a038f2bb-2364-4a8d-918c-a0776dfa8458","Type":"ContainerStarted","Data":"8e6c255f8b398243d740f51db56e43e8b31a64d8b628a81b1bc40845cd3c39af"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.010013 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.010137 4710 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.011582 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8dfcdb5-zd6wv" event={"ID":"ff8c79d6-681e-4c93-b80e-15c8ff06d6af","Type":"ContainerStarted","Data":"be35f0a95ed1688adac1c798c783e83776e8f8394b09024da645504f328564f6"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.011942 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8dfcdb5-zd6wv" event={"ID":"ff8c79d6-681e-4c93-b80e-15c8ff06d6af","Type":"ContainerStarted","Data":"0debd0d85e96d66c4bf0b55ff9121d461c38c5637e824c4aeca415b182c746d1"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.011971 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.011983 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-bf8dfcdb5-zd6wv" event={"ID":"ff8c79d6-681e-4c93-b80e-15c8ff06d6af","Type":"ContainerStarted","Data":"f7f8c4661f6d487236243891773911cfd9cb5f2cd98c0c52538a08ca003efb24"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.013386 4710 generic.go:334] "Generic (PLEG): container finished" podID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerID="68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835" exitCode=0 Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.013474 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" event={"ID":"c22525fc-cc11-40ce-9f18-c59e860269ba","Type":"ContainerDied","Data":"68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.013500 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" event={"ID":"c22525fc-cc11-40ce-9f18-c59e860269ba","Type":"ContainerStarted","Data":"b9a6d12103db1ba1a394a9ce4a0520856659685f45b867389d088e1415339d6d"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.015974 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerStarted","Data":"8f194ab7b3d2a496c0beb362af7650aa9d4b2bca4ff553742b37aee8dbc32e28"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.016025 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerStarted","Data":"a3d876af5f44c7bc1a90e562a2f6dcf6cfbc967a24db943267055bc38f2678d6"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.016042 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerStarted","Data":"d671a8139c3e7f885b6236bcb926f98cd4c2022ae1b417292960d9de05ff628d"} Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.017235 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.033590 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7746794c77-ljwdn" podStartSLOduration=15.033574892 podStartE2EDuration="15.033574892s" podCreationTimestamp="2025-10-09 09:20:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-10-09 09:20:47.032268037 +0000 UTC m=+970.522376425" watchObservedRunningTime="2025-10-09 09:20:47.033574892 +0000 UTC m=+970.523683289" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.053914 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-655f9d4b56-jw9nr" podStartSLOduration=10.053895177 podStartE2EDuration="10.053895177s" podCreationTimestamp="2025-10-09 09:20:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:47.052899991 +0000 UTC m=+970.543008388" watchObservedRunningTime="2025-10-09 09:20:47.053895177 +0000 UTC m=+970.544003574" Oct 09 09:20:47 crc kubenswrapper[4710]: I1009 09:20:47.093787 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-bf8dfcdb5-zd6wv" podStartSLOduration=8.093767721 podStartE2EDuration="8.093767721s" podCreationTimestamp="2025-10-09 09:20:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:47.09232959 +0000 UTC m=+970.582437986" watchObservedRunningTime="2025-10-09 09:20:47.093767721 +0000 UTC m=+970.583876118" Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.026444 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" event={"ID":"c22525fc-cc11-40ce-9f18-c59e860269ba","Type":"ContainerStarted","Data":"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f"} Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.027796 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.029183 4710 generic.go:334] "Generic (PLEG): container finished" podID="972b488b-f691-4100-b183-692b2e27f665" containerID="58175b2ee152dc9210861748435624e7594fb216a19cdb4d98ba91e3be1c649d" exitCode=0 Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.030071 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-g5glz" event={"ID":"972b488b-f691-4100-b183-692b2e27f665","Type":"ContainerDied","Data":"58175b2ee152dc9210861748435624e7594fb216a19cdb4d98ba91e3be1c649d"} Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.050390 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" podStartSLOduration=12.050368257 podStartE2EDuration="12.050368257s" podCreationTimestamp="2025-10-09 09:20:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:48.044910415 +0000 UTC m=+971.535018812" watchObservedRunningTime="2025-10-09 09:20:48.050368257 +0000 UTC m=+971.540476654" Oct 09 09:20:48 crc kubenswrapper[4710]: I1009 09:20:48.051916 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-685c5ffc88-49dzx" podStartSLOduration=22.051901157 podStartE2EDuration="22.051901157s" podCreationTimestamp="2025-10-09 09:20:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:47.132703228 +0000 UTC m=+970.622811625" watchObservedRunningTime="2025-10-09 09:20:48.051901157 +0000 UTC m=+971.542009554" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 
09:20:49.398886 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.556736 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data\") pod \"972b488b-f691-4100-b183-692b2e27f665\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.556872 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c9ft\" (UniqueName: \"kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft\") pod \"972b488b-f691-4100-b183-692b2e27f665\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.556894 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle\") pod \"972b488b-f691-4100-b183-692b2e27f665\" (UID: \"972b488b-f691-4100-b183-692b2e27f665\") " Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.566152 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft" (OuterVolumeSpecName: "kube-api-access-8c9ft") pod "972b488b-f691-4100-b183-692b2e27f665" (UID: "972b488b-f691-4100-b183-692b2e27f665"). InnerVolumeSpecName "kube-api-access-8c9ft". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.570395 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "972b488b-f691-4100-b183-692b2e27f665" (UID: "972b488b-f691-4100-b183-692b2e27f665"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.589004 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "972b488b-f691-4100-b183-692b2e27f665" (UID: "972b488b-f691-4100-b183-692b2e27f665"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.658608 4710 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.658633 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c9ft\" (UniqueName: \"kubernetes.io/projected/972b488b-f691-4100-b183-692b2e27f665-kube-api-access-8c9ft\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:49 crc kubenswrapper[4710]: I1009 09:20:49.658644 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972b488b-f691-4100-b183-692b2e27f665-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.051018 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-g5glz" event={"ID":"972b488b-f691-4100-b183-692b2e27f665","Type":"ContainerDied","Data":"e0d6876c489e07be999d882f98a1d123c4b8bc62e65699b5ca329c396f404c09"} Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.051408 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0d6876c489e07be999d882f98a1d123c4b8bc62e65699b5ca329c396f404c09" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.051046 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-g5glz" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.283953 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-768bd74b7c-lmgpx"] Oct 09 09:20:50 crc kubenswrapper[4710]: E1009 09:20:50.284292 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972b488b-f691-4100-b183-692b2e27f665" containerName="barbican-db-sync" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.284305 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="972b488b-f691-4100-b183-692b2e27f665" containerName="barbican-db-sync" Oct 09 09:20:50 crc kubenswrapper[4710]: E1009 09:20:50.284316 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="init" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.284320 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="init" Oct 09 09:20:50 crc kubenswrapper[4710]: E1009 09:20:50.284357 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.284363 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.284571 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b964db51-b1b4-4328-8134-3e4adf82dd59" containerName="dnsmasq-dns" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.284585 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="972b488b-f691-4100-b183-692b2e27f665" containerName="barbican-db-sync" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.285412 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.291995 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.292716 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.293213 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-vqrvr" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.296712 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6b5698f678-pqrnp"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.297814 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.299598 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.302887 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-768bd74b7c-lmgpx"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.319967 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6b5698f678-pqrnp"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.381815 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data-custom\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.381873 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382001 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp8vg\" (UniqueName: \"kubernetes.io/projected/5a03d872-b139-414d-a62f-953c23fb01a6-kube-api-access-cp8vg\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382167 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a03d872-b139-414d-a62f-953c23fb01a6-logs\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382256 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data-custom\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: 
\"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382276 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c369359e-6e4c-478b-8ef5-0ebd384acbd8-logs\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382306 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382388 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sld78\" (UniqueName: \"kubernetes.io/projected/c369359e-6e4c-478b-8ef5-0ebd384acbd8-kube-api-access-sld78\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382476 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-combined-ca-bundle\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.382514 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-combined-ca-bundle\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.410061 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.437700 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.439187 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484717 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data-custom\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484779 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484846 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp8vg\" (UniqueName: \"kubernetes.io/projected/5a03d872-b139-414d-a62f-953c23fb01a6-kube-api-access-cp8vg\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484869 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mpqh\" (UniqueName: \"kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484901 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.484950 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485012 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a03d872-b139-414d-a62f-953c23fb01a6-logs\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485044 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485077 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data-custom\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485100 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c369359e-6e4c-478b-8ef5-0ebd384acbd8-logs\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485122 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485158 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485206 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sld78\" (UniqueName: \"kubernetes.io/projected/c369359e-6e4c-478b-8ef5-0ebd384acbd8-kube-api-access-sld78\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485265 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-combined-ca-bundle\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.485300 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-combined-ca-bundle\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.487847 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a03d872-b139-414d-a62f-953c23fb01a6-logs\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.488211 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c369359e-6e4c-478b-8ef5-0ebd384acbd8-logs\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.496568 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-combined-ca-bundle\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.497550 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.498973 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data-custom\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.500416 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5a03d872-b139-414d-a62f-953c23fb01a6-config-data-custom\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.500886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-config-data\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.511124 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c369359e-6e4c-478b-8ef5-0ebd384acbd8-combined-ca-bundle\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.522146 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.564080 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sld78\" (UniqueName: \"kubernetes.io/projected/c369359e-6e4c-478b-8ef5-0ebd384acbd8-kube-api-access-sld78\") pod \"barbican-worker-768bd74b7c-lmgpx\" (UID: \"c369359e-6e4c-478b-8ef5-0ebd384acbd8\") " pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.564176 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp8vg\" (UniqueName: \"kubernetes.io/projected/5a03d872-b139-414d-a62f-953c23fb01a6-kube-api-access-cp8vg\") pod \"barbican-keystone-listener-6b5698f678-pqrnp\" (UID: \"5a03d872-b139-414d-a62f-953c23fb01a6\") " pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.588916 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mpqh\" (UniqueName: \"kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh\") pod 
\"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.588984 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.589013 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.589069 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.589101 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.590242 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.590276 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.590857 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.590945 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.615814 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-768bd74b7c-lmgpx" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.641032 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.642715 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mpqh\" (UniqueName: \"kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh\") pod \"dnsmasq-dns-7ff5bdc4b9-vtdw9\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.705366 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.706806 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.709442 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.724477 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.778407 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.794786 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.794834 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.794881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6plx\" (UniqueName: \"kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.794927 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.794967 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.898452 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.898510 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.898557 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6plx\" (UniqueName: \"kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.898605 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.898646 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.899560 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.907024 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.907551 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.916274 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data\") pod \"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:50 crc kubenswrapper[4710]: I1009 09:20:50.922560 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6plx\" (UniqueName: \"kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx\") pod 
\"barbican-api-78c6b597d-zsdxm\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.036863 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.056697 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="dnsmasq-dns" containerID="cri-o://33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f" gracePeriod=10 Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.754450 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-768bd74b7c-lmgpx"] Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.798199 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:20:51 crc kubenswrapper[4710]: W1009 09:20:51.819936 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86ca38b0_140d_419f_9a96_6c9b3825a1ea.slice/crio-d16b915ec888fb8eb81120578afb08b4001f398e40ae1a9540289647ff53b88b WatchSource:0}: Error finding container d16b915ec888fb8eb81120578afb08b4001f398e40ae1a9540289647ff53b88b: Status 404 returned error can't find the container with id d16b915ec888fb8eb81120578afb08b4001f398e40ae1a9540289647ff53b88b Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.883710 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:20:51 crc kubenswrapper[4710]: W1009 09:20:51.890862 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad719353_0773_432e_921f_a1480314f1c2.slice/crio-737ee9a3d8e275b67d820f5baf0a42dc0ca60a7142aefdc35c61f5b7b7a5e6e4 WatchSource:0}: Error finding container 737ee9a3d8e275b67d820f5baf0a42dc0ca60a7142aefdc35c61f5b7b7a5e6e4: Status 404 returned error can't find the container with id 737ee9a3d8e275b67d820f5baf0a42dc0ca60a7142aefdc35c61f5b7b7a5e6e4 Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.906616 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6b5698f678-pqrnp"] Oct 09 09:20:51 crc kubenswrapper[4710]: I1009 09:20:51.934962 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.031170 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc\") pod \"c22525fc-cc11-40ce-9f18-c59e860269ba\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.031250 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb\") pod \"c22525fc-cc11-40ce-9f18-c59e860269ba\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.031301 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb\") pod \"c22525fc-cc11-40ce-9f18-c59e860269ba\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.031340 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt88v\" (UniqueName: \"kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v\") pod \"c22525fc-cc11-40ce-9f18-c59e860269ba\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.031376 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config\") pod \"c22525fc-cc11-40ce-9f18-c59e860269ba\" (UID: \"c22525fc-cc11-40ce-9f18-c59e860269ba\") " Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.042931 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v" (OuterVolumeSpecName: "kube-api-access-zt88v") pod "c22525fc-cc11-40ce-9f18-c59e860269ba" (UID: "c22525fc-cc11-40ce-9f18-c59e860269ba"). InnerVolumeSpecName "kube-api-access-zt88v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.072087 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerStarted","Data":"737ee9a3d8e275b67d820f5baf0a42dc0ca60a7142aefdc35c61f5b7b7a5e6e4"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.075207 4710 generic.go:334] "Generic (PLEG): container finished" podID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerID="33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f" exitCode=0 Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.075287 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" event={"ID":"c22525fc-cc11-40ce-9f18-c59e860269ba","Type":"ContainerDied","Data":"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.075309 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" event={"ID":"c22525fc-cc11-40ce-9f18-c59e860269ba","Type":"ContainerDied","Data":"b9a6d12103db1ba1a394a9ce4a0520856659685f45b867389d088e1415339d6d"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.075328 4710 scope.go:117] "RemoveContainer" containerID="33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.075585 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d7d647849-8j4tk" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.079635 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" event={"ID":"5a03d872-b139-414d-a62f-953c23fb01a6","Type":"ContainerStarted","Data":"b286465d89d8d1836710d11618ee4b7c45c8961fd72edfadf5df6f19f6ab742c"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.081993 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerStarted","Data":"fb1060b3f95dacaaba49d3c09376861b1e78e4cf6463f9a4bd6428a39109951a"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.082022 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerStarted","Data":"d16b915ec888fb8eb81120578afb08b4001f398e40ae1a9540289647ff53b88b"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.088634 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-768bd74b7c-lmgpx" event={"ID":"c369359e-6e4c-478b-8ef5-0ebd384acbd8","Type":"ContainerStarted","Data":"4e08578c3d9392f8db2840872d539a5b5f467287e620aefd00da4bc0c12b2fa3"} Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.088769 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c22525fc-cc11-40ce-9f18-c59e860269ba" (UID: "c22525fc-cc11-40ce-9f18-c59e860269ba"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.109397 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c22525fc-cc11-40ce-9f18-c59e860269ba" (UID: "c22525fc-cc11-40ce-9f18-c59e860269ba"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.112375 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config" (OuterVolumeSpecName: "config") pod "c22525fc-cc11-40ce-9f18-c59e860269ba" (UID: "c22525fc-cc11-40ce-9f18-c59e860269ba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.116267 4710 scope.go:117] "RemoveContainer" containerID="68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.116812 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c22525fc-cc11-40ce-9f18-c59e860269ba" (UID: "c22525fc-cc11-40ce-9f18-c59e860269ba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.132889 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.132912 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.132922 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.132931 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c22525fc-cc11-40ce-9f18-c59e860269ba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.132940 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt88v\" (UniqueName: \"kubernetes.io/projected/c22525fc-cc11-40ce-9f18-c59e860269ba-kube-api-access-zt88v\") on node \"crc\" DevicePath \"\"" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.137264 4710 scope.go:117] "RemoveContainer" containerID="33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f" Oct 09 09:20:52 crc kubenswrapper[4710]: E1009 09:20:52.138025 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f\": container with ID starting with 33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f not found: ID does not exist" containerID="33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.138060 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f"} err="failed to get container status \"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f\": rpc error: code = NotFound desc = could not find container \"33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f\": container with ID starting with 33ad3f5bbcbe8e274420f9daca82f696b1a77ed86a8b58d48438bd26f46b904f not found: ID does not exist" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.138082 4710 scope.go:117] "RemoveContainer" containerID="68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835" Oct 09 09:20:52 crc kubenswrapper[4710]: E1009 09:20:52.138574 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835\": container with ID starting with 68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835 not found: ID does not exist" containerID="68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.138615 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835"} err="failed to get container status \"68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835\": rpc error: code = NotFound desc = could not find container \"68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835\": container with ID starting with 68137071bf3d95c6709b65aaf0e46c371a5beaa944d54e4ba7a68eb34e29f835 not found: ID does not exist" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.413250 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.420160 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d7d647849-8j4tk"] Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.824956 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" path="/var/lib/kubelet/pods/c22525fc-cc11-40ce-9f18-c59e860269ba/volumes" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.867717 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7c69bd8f44-7wmj7"] Oct 09 09:20:52 crc kubenswrapper[4710]: E1009 09:20:52.869567 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="dnsmasq-dns" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.869657 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="dnsmasq-dns" Oct 09 09:20:52 crc kubenswrapper[4710]: E1009 09:20:52.869723 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="init" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.869785 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="init" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.870026 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c22525fc-cc11-40ce-9f18-c59e860269ba" containerName="dnsmasq-dns" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.875447 4710 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.878713 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.878995 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 09 09:20:52 crc kubenswrapper[4710]: I1009 09:20:52.896220 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c69bd8f44-7wmj7"] Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.050922 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data-custom\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051006 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d11eb962-d716-4dcf-9ec6-f82e6969640f-logs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051309 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-internal-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051354 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-public-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051412 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051472 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-combined-ca-bundle\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.051523 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5zvm\" (UniqueName: \"kubernetes.io/projected/d11eb962-d716-4dcf-9ec6-f82e6969640f-kube-api-access-c5zvm\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.099758 4710 
generic.go:334] "Generic (PLEG): container finished" podID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerID="fb1060b3f95dacaaba49d3c09376861b1e78e4cf6463f9a4bd6428a39109951a" exitCode=0 Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.100014 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerDied","Data":"fb1060b3f95dacaaba49d3c09376861b1e78e4cf6463f9a4bd6428a39109951a"} Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.100099 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerStarted","Data":"a7b05d7dd1c08ff109fc3de6efee93c90bc3904a19ca8249e59899f5a4e1f924"} Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.100529 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.104301 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerStarted","Data":"ea1fd699d6d3b22a5512d4e8c55da578c00f8e02823b707b8341a3423bccd604"} Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.104338 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerStarted","Data":"b9b577012a27a5f084d8b20bda3e97fff33b6983215e963f3fd3596d4bc021aa"} Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.105217 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.105263 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.122916 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" podStartSLOduration=3.122890254 podStartE2EDuration="3.122890254s" podCreationTimestamp="2025-10-09 09:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:53.11657226 +0000 UTC m=+976.606680656" watchObservedRunningTime="2025-10-09 09:20:53.122890254 +0000 UTC m=+976.612998651" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.133581 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-78c6b597d-zsdxm" podStartSLOduration=3.133558872 podStartE2EDuration="3.133558872s" podCreationTimestamp="2025-10-09 09:20:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:53.13041831 +0000 UTC m=+976.620526707" watchObservedRunningTime="2025-10-09 09:20:53.133558872 +0000 UTC m=+976.623667269" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.155988 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data-custom\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 
09:20:53.156081 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d11eb962-d716-4dcf-9ec6-f82e6969640f-logs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.156152 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-internal-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.156201 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-public-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.156294 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.156364 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-combined-ca-bundle\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.156462 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5zvm\" (UniqueName: \"kubernetes.io/projected/d11eb962-d716-4dcf-9ec6-f82e6969640f-kube-api-access-c5zvm\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.158377 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d11eb962-d716-4dcf-9ec6-f82e6969640f-logs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.163398 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data-custom\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.169588 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-config-data\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.169989 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-combined-ca-bundle\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.170370 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-public-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.172853 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d11eb962-d716-4dcf-9ec6-f82e6969640f-internal-tls-certs\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.187977 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5zvm\" (UniqueName: \"kubernetes.io/projected/d11eb962-d716-4dcf-9ec6-f82e6969640f-kube-api-access-c5zvm\") pod \"barbican-api-7c69bd8f44-7wmj7\" (UID: \"d11eb962-d716-4dcf-9ec6-f82e6969640f\") " pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:53 crc kubenswrapper[4710]: I1009 09:20:53.203991 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:57 crc kubenswrapper[4710]: I1009 09:20:57.679818 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7c69bd8f44-7wmj7"] Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.176579 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-768bd74b7c-lmgpx" event={"ID":"c369359e-6e4c-478b-8ef5-0ebd384acbd8","Type":"ContainerStarted","Data":"030e533b0ad563850b5985f5f74ef1885f25e2e52e0f457bcf592b311f4d0b4d"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.177621 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-768bd74b7c-lmgpx" event={"ID":"c369359e-6e4c-478b-8ef5-0ebd384acbd8","Type":"ContainerStarted","Data":"0c4cd4413437c03aaece000a7c09ea93a2f08da5be380d7e5439f18aee3c6b73"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186091 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerStarted","Data":"d53c1ecaa60a9a8b9724de2b7a0ee97b6cf6e6063bf388a15861c2e018804e4e"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186387 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-central-agent" containerID="cri-o://acc54a4a3f2f93c817365f7d6b06460895f262559e67eb200ec7ffbcc2b25926" gracePeriod=30 Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186760 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186797 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="proxy-httpd" 
containerID="cri-o://d53c1ecaa60a9a8b9724de2b7a0ee97b6cf6e6063bf388a15861c2e018804e4e" gracePeriod=30 Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186814 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="sg-core" containerID="cri-o://c3e837075ab7aca4ed7966170ec68b220aa7b909c7b9372b457ca94f1d8fcc34" gracePeriod=30 Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.186827 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-notification-agent" containerID="cri-o://3fa7718185235f8cbfbd51f1953b57efb9f226c640f87ac4e069445cc1143fe3" gracePeriod=30 Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.197634 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c69bd8f44-7wmj7" event={"ID":"d11eb962-d716-4dcf-9ec6-f82e6969640f","Type":"ContainerStarted","Data":"5dd72b6c84b2b292bce9bc77b5b6f153247561ead49495996f394a76f9463ce3"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.197688 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c69bd8f44-7wmj7" event={"ID":"d11eb962-d716-4dcf-9ec6-f82e6969640f","Type":"ContainerStarted","Data":"39d3fd42e7fed89a0781ab552de6b6ddf7b84c8e08a607ae15a27ad1a183c72f"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.197701 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7c69bd8f44-7wmj7" event={"ID":"d11eb962-d716-4dcf-9ec6-f82e6969640f","Type":"ContainerStarted","Data":"8ad892d0bfcb08cfb197435859c13d31f0b407f55bd1c64fd77e705380fc26f7"} Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.198413 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.198531 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.198687 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-768bd74b7c-lmgpx" podStartSLOduration=2.259447447 podStartE2EDuration="8.19867772s" podCreationTimestamp="2025-10-09 09:20:50 +0000 UTC" firstStartedPulling="2025-10-09 09:20:51.795579078 +0000 UTC m=+975.285687475" lastFinishedPulling="2025-10-09 09:20:57.734809351 +0000 UTC m=+981.224917748" observedRunningTime="2025-10-09 09:20:58.198194158 +0000 UTC m=+981.688302555" watchObservedRunningTime="2025-10-09 09:20:58.19867772 +0000 UTC m=+981.688786117" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.218406 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7c69bd8f44-7wmj7" podStartSLOduration=6.218387257 podStartE2EDuration="6.218387257s" podCreationTimestamp="2025-10-09 09:20:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:20:58.216806987 +0000 UTC m=+981.706915384" watchObservedRunningTime="2025-10-09 09:20:58.218387257 +0000 UTC m=+981.708495654" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.251898 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.022566122 podStartE2EDuration="47.251877921s" 
podCreationTimestamp="2025-10-09 09:20:11 +0000 UTC" firstStartedPulling="2025-10-09 09:20:12.507097618 +0000 UTC m=+935.997206006" lastFinishedPulling="2025-10-09 09:20:57.736409408 +0000 UTC m=+981.226517805" observedRunningTime="2025-10-09 09:20:58.249131843 +0000 UTC m=+981.739240240" watchObservedRunningTime="2025-10-09 09:20:58.251877921 +0000 UTC m=+981.741986318" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.814054 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:58 crc kubenswrapper[4710]: I1009 09:20:58.895018 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-685c5ffc88-49dzx" Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233672 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a28920d-7c53-4e95-990d-d7499229899f" containerID="d53c1ecaa60a9a8b9724de2b7a0ee97b6cf6e6063bf388a15861c2e018804e4e" exitCode=0 Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233710 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a28920d-7c53-4e95-990d-d7499229899f" containerID="c3e837075ab7aca4ed7966170ec68b220aa7b909c7b9372b457ca94f1d8fcc34" exitCode=2 Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233720 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a28920d-7c53-4e95-990d-d7499229899f" containerID="acc54a4a3f2f93c817365f7d6b06460895f262559e67eb200ec7ffbcc2b25926" exitCode=0 Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233907 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerDied","Data":"d53c1ecaa60a9a8b9724de2b7a0ee97b6cf6e6063bf388a15861c2e018804e4e"} Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233980 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerDied","Data":"c3e837075ab7aca4ed7966170ec68b220aa7b909c7b9372b457ca94f1d8fcc34"} Oct 09 09:20:59 crc kubenswrapper[4710]: I1009 09:20:59.233994 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerDied","Data":"acc54a4a3f2f93c817365f7d6b06460895f262559e67eb200ec7ffbcc2b25926"} Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.242971 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" event={"ID":"5a03d872-b139-414d-a62f-953c23fb01a6","Type":"ContainerStarted","Data":"c0467fa69a18a0822691a78dcf63f07871f4e54ff8643febfc7309cfe1fcdcc4"} Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.243359 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" event={"ID":"5a03d872-b139-414d-a62f-953c23fb01a6","Type":"ContainerStarted","Data":"65e70eb8881156723dc14635fe2e5200c6471519ddc62ec4da1258934529d940"} Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.268079 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6b5698f678-pqrnp" podStartSLOduration=3.037571217 podStartE2EDuration="10.268054079s" podCreationTimestamp="2025-10-09 09:20:50 +0000 UTC" firstStartedPulling="2025-10-09 09:20:51.915375881 +0000 UTC m=+975.405484279" lastFinishedPulling="2025-10-09 09:20:59.145858744 +0000 UTC m=+982.635967141" 
observedRunningTime="2025-10-09 09:21:00.26491082 +0000 UTC m=+983.755019217" watchObservedRunningTime="2025-10-09 09:21:00.268054079 +0000 UTC m=+983.758162475" Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.780613 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.844723 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:21:00 crc kubenswrapper[4710]: I1009 09:21:00.845354 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="dnsmasq-dns" containerID="cri-o://946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d" gracePeriod=10 Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.272515 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.299667 4710 generic.go:334] "Generic (PLEG): container finished" podID="0181c67b-615b-4354-b053-8effb7ab7fab" containerID="946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d" exitCode=0 Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.299751 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerDied","Data":"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d"} Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.299783 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" event={"ID":"0181c67b-615b-4354-b053-8effb7ab7fab","Type":"ContainerDied","Data":"fa014dc060821784ed6b82bb797781e5d858c5e43d287bc38a339477eddfdd62"} Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.299800 4710 scope.go:117] "RemoveContainer" containerID="946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.299934 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-748d7644cf-5dqfk" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.312263 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rmqx2" event={"ID":"c9e9fec6-00af-46a7-9a1f-a59b6b06969c","Type":"ContainerStarted","Data":"20b7d5d1ea2e8d027f6ac1a980a3024e1871dfcb2d77a3f4d99ff6abf6414b1c"} Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.333113 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-rmqx2" podStartSLOduration=7.052626812 podStartE2EDuration="45.333089985s" podCreationTimestamp="2025-10-09 09:20:16 +0000 UTC" firstStartedPulling="2025-10-09 09:20:22.168363342 +0000 UTC m=+945.658471739" lastFinishedPulling="2025-10-09 09:21:00.448826515 +0000 UTC m=+983.938934912" observedRunningTime="2025-10-09 09:21:01.327158318 +0000 UTC m=+984.817266716" watchObservedRunningTime="2025-10-09 09:21:01.333089985 +0000 UTC m=+984.823198382" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.334455 4710 scope.go:117] "RemoveContainer" containerID="f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.365831 4710 scope.go:117] "RemoveContainer" containerID="946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d" Oct 09 09:21:01 crc kubenswrapper[4710]: E1009 09:21:01.367563 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d\": container with ID starting with 946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d not found: ID does not exist" containerID="946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.367607 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d"} err="failed to get container status \"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d\": rpc error: code = NotFound desc = could not find container \"946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d\": container with ID starting with 946ada98953ca1e535b42950d4ab730001c4879dc1184cd7fabaaee698770a1d not found: ID does not exist" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.367629 4710 scope.go:117] "RemoveContainer" containerID="f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593" Oct 09 09:21:01 crc kubenswrapper[4710]: E1009 09:21:01.367992 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593\": container with ID starting with f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593 not found: ID does not exist" containerID="f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.368011 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593"} err="failed to get container status \"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593\": rpc error: code = NotFound desc = could not find container \"f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593\": container with ID starting with 
f7f3facd1ba76cf0d3c42102e0dc594323c1c9167de689cd1140371c34714593 not found: ID does not exist" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.401198 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb\") pod \"0181c67b-615b-4354-b053-8effb7ab7fab\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.401271 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config\") pod \"0181c67b-615b-4354-b053-8effb7ab7fab\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.401527 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb\") pod \"0181c67b-615b-4354-b053-8effb7ab7fab\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.401605 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4sbb\" (UniqueName: \"kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb\") pod \"0181c67b-615b-4354-b053-8effb7ab7fab\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.401737 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc\") pod \"0181c67b-615b-4354-b053-8effb7ab7fab\" (UID: \"0181c67b-615b-4354-b053-8effb7ab7fab\") " Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.418593 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb" (OuterVolumeSpecName: "kube-api-access-p4sbb") pod "0181c67b-615b-4354-b053-8effb7ab7fab" (UID: "0181c67b-615b-4354-b053-8effb7ab7fab"). InnerVolumeSpecName "kube-api-access-p4sbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.459029 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0181c67b-615b-4354-b053-8effb7ab7fab" (UID: "0181c67b-615b-4354-b053-8effb7ab7fab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.475049 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config" (OuterVolumeSpecName: "config") pod "0181c67b-615b-4354-b053-8effb7ab7fab" (UID: "0181c67b-615b-4354-b053-8effb7ab7fab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.486994 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0181c67b-615b-4354-b053-8effb7ab7fab" (UID: "0181c67b-615b-4354-b053-8effb7ab7fab"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.503913 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.503944 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4sbb\" (UniqueName: \"kubernetes.io/projected/0181c67b-615b-4354-b053-8effb7ab7fab-kube-api-access-p4sbb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.503956 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.503966 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.511865 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0181c67b-615b-4354-b053-8effb7ab7fab" (UID: "0181c67b-615b-4354-b053-8effb7ab7fab"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.609864 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0181c67b-615b-4354-b053-8effb7ab7fab-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.664054 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:21:01 crc kubenswrapper[4710]: I1009 09:21:01.675543 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-748d7644cf-5dqfk"] Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.332288 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a28920d-7c53-4e95-990d-d7499229899f" containerID="3fa7718185235f8cbfbd51f1953b57efb9f226c640f87ac4e069445cc1143fe3" exitCode=0 Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.332600 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerDied","Data":"3fa7718185235f8cbfbd51f1953b57efb9f226c640f87ac4e069445cc1143fe3"} Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.332633 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a28920d-7c53-4e95-990d-d7499229899f","Type":"ContainerDied","Data":"5f75592491efc617f54556bd7da0ef42211f0aa280caf323d01ebfef663fdcbc"} Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.332645 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f75592491efc617f54556bd7da0ef42211f0aa280caf323d01ebfef663fdcbc" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.349954 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431487 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431605 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn6xx\" (UniqueName: \"kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431632 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431703 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431773 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431795 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.431865 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle\") pod \"2a28920d-7c53-4e95-990d-d7499229899f\" (UID: \"2a28920d-7c53-4e95-990d-d7499229899f\") " Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.436170 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.439990 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts" (OuterVolumeSpecName: "scripts") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.441559 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.443581 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx" (OuterVolumeSpecName: "kube-api-access-mn6xx") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "kube-api-access-mn6xx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.487135 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.525787 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.533008 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data" (OuterVolumeSpecName: "config-data") pod "2a28920d-7c53-4e95-990d-d7499229899f" (UID: "2a28920d-7c53-4e95-990d-d7499229899f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.534932 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.534966 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.534976 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.534984 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.534998 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a28920d-7c53-4e95-990d-d7499229899f-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.535007 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn6xx\" (UniqueName: \"kubernetes.io/projected/2a28920d-7c53-4e95-990d-d7499229899f-kube-api-access-mn6xx\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.535016 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a28920d-7c53-4e95-990d-d7499229899f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.642019 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.727113 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:21:02 crc kubenswrapper[4710]: I1009 09:21:02.825578 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" path="/var/lib/kubelet/pods/0181c67b-615b-4354-b053-8effb7ab7fab/volumes" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.339937 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.363381 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.367187 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395089 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395656 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="sg-core" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395682 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="sg-core" Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395702 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="proxy-httpd" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395708 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="proxy-httpd" Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395719 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-notification-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395725 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-notification-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395751 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="dnsmasq-dns" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395757 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="dnsmasq-dns" Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395769 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="init" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395776 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="init" Oct 09 09:21:03 crc kubenswrapper[4710]: E1009 09:21:03.395785 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-central-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.395791 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-central-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.396000 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0181c67b-615b-4354-b053-8effb7ab7fab" containerName="dnsmasq-dns" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.396010 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="proxy-httpd" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.396032 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-central-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.396042 4710 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="sg-core" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.396051 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a28920d-7c53-4e95-990d-d7499229899f" containerName="ceilometer-notification-agent" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.399452 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.402506 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.403057 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.433131 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.461595 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.461653 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.461769 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.461840 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.462163 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.462262 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.462492 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djd78\" (UniqueName: \"kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78\") pod \"ceilometer-0\" (UID: 
\"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.564887 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.564972 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djd78\" (UniqueName: \"kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565079 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565103 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565173 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565218 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565292 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565471 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.565790 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.571180 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " 
pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.576193 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.578284 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.579835 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.589983 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djd78\" (UniqueName: \"kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78\") pod \"ceilometer-0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " pod="openstack/ceilometer-0" Oct 09 09:21:03 crc kubenswrapper[4710]: I1009 09:21:03.715000 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.170908 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7746794c77-ljwdn" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.212149 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.351382 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerStarted","Data":"bd44a810c477c63a594f47743efcc8c7e259b9cda93b04312278fb3605609769"} Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.843763 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a28920d-7c53-4e95-990d-d7499229899f" path="/var/lib/kubelet/pods/2a28920d-7c53-4e95-990d-d7499229899f/volumes" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.864730 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.927580 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7c69bd8f44-7wmj7" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.974130 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.975489 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.978354 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-f4nzv" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.978541 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 09 09:21:04 crc kubenswrapper[4710]: I1009 09:21:04.989838 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.020974 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.021265 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-78c6b597d-zsdxm" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api-log" containerID="cri-o://b9b577012a27a5f084d8b20bda3e97fff33b6983215e963f3fd3596d4bc021aa" gracePeriod=30 Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.021762 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-78c6b597d-zsdxm" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api" containerID="cri-o://ea1fd699d6d3b22a5512d4e8c55da578c00f8e02823b707b8341a3423bccd604" gracePeriod=30 Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.032490 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.098110 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.098199 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.098337 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.098469 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-722rn\" (UniqueName: \"kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.225887 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" 
Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.225954 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.226003 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.226036 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-722rn\" (UniqueName: \"kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.233860 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.233964 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.236938 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.250990 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-722rn\" (UniqueName: \"kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn\") pod \"openstackclient\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.267486 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.268293 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.285225 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.302462 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.303405 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.318148 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.382752 4710 generic.go:334] "Generic (PLEG): container finished" podID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" containerID="20b7d5d1ea2e8d027f6ac1a980a3024e1871dfcb2d77a3f4d99ff6abf6414b1c" exitCode=0 Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.383828 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rmqx2" event={"ID":"c9e9fec6-00af-46a7-9a1f-a59b6b06969c","Type":"ContainerDied","Data":"20b7d5d1ea2e8d027f6ac1a980a3024e1871dfcb2d77a3f4d99ff6abf6414b1c"} Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.400946 4710 generic.go:334] "Generic (PLEG): container finished" podID="ad719353-0773-432e-921f-a1480314f1c2" containerID="b9b577012a27a5f084d8b20bda3e97fff33b6983215e963f3fd3596d4bc021aa" exitCode=143 Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.401100 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerDied","Data":"b9b577012a27a5f084d8b20bda3e97fff33b6983215e963f3fd3596d4bc021aa"} Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.429254 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6vp7\" (UniqueName: \"kubernetes.io/projected/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-kube-api-access-n6vp7\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.429397 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.429568 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.429642 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: E1009 09:21:05.456237 4710 log.go:32] "RunPodSandbox from runtime service failed" err=< Oct 09 09:21:05 crc kubenswrapper[4710]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_811d09f1-53e2-4617-8c12-62448ef1f729_0(a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 
'ContainerID:"a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9" Netns:"/var/run/netns/aadf7096-3647-4d7b-91d1-c62c838a9174" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9;K8S_POD_UID=811d09f1-53e2-4617-8c12-62448ef1f729" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/811d09f1-53e2-4617-8c12-62448ef1f729]: expected pod UID "811d09f1-53e2-4617-8c12-62448ef1f729" but got "b7779e6d-a6fb-4d03-8636-0dafb2767cbc" from Kube API Oct 09 09:21:05 crc kubenswrapper[4710]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 09 09:21:05 crc kubenswrapper[4710]: > Oct 09 09:21:05 crc kubenswrapper[4710]: E1009 09:21:05.456325 4710 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Oct 09 09:21:05 crc kubenswrapper[4710]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_811d09f1-53e2-4617-8c12-62448ef1f729_0(a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9" Netns:"/var/run/netns/aadf7096-3647-4d7b-91d1-c62c838a9174" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=a8811696f6f8d98d92ea27ea973057c7764b7f54691577783ec6f5fe848d01c9;K8S_POD_UID=811d09f1-53e2-4617-8c12-62448ef1f729" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/811d09f1-53e2-4617-8c12-62448ef1f729]: expected pod UID "811d09f1-53e2-4617-8c12-62448ef1f729" but got "b7779e6d-a6fb-4d03-8636-0dafb2767cbc" from Kube API Oct 09 09:21:05 crc kubenswrapper[4710]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 09 09:21:05 crc kubenswrapper[4710]: > pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.532922 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.533076 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " 
pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.533118 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.533204 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6vp7\" (UniqueName: \"kubernetes.io/projected/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-kube-api-access-n6vp7\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.535541 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.538727 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.538914 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-openstack-config-secret\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.550539 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6vp7\" (UniqueName: \"kubernetes.io/projected/b7779e6d-a6fb-4d03-8636-0dafb2767cbc-kube-api-access-n6vp7\") pod \"openstackclient\" (UID: \"b7779e6d-a6fb-4d03-8636-0dafb2767cbc\") " pod="openstack/openstackclient" Oct 09 09:21:05 crc kubenswrapper[4710]: I1009 09:21:05.655866 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.137070 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 09 09:21:06 crc kubenswrapper[4710]: W1009 09:21:06.137967 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7779e6d_a6fb_4d03_8636_0dafb2767cbc.slice/crio-2cfd7765ecf56d45f90530c9eff727ad5c267b3a83b757dd4a7bf9d978c2426f WatchSource:0}: Error finding container 2cfd7765ecf56d45f90530c9eff727ad5c267b3a83b757dd4a7bf9d978c2426f: Status 404 returned error can't find the container with id 2cfd7765ecf56d45f90530c9eff727ad5c267b3a83b757dd4a7bf9d978c2426f Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.415139 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerStarted","Data":"2dacd45e9146ad05466bc69726fd4c7841a6ed80df689390968d80b917787840"} Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.417472 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.417542 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b7779e6d-a6fb-4d03-8636-0dafb2767cbc","Type":"ContainerStarted","Data":"2cfd7765ecf56d45f90530c9eff727ad5c267b3a83b757dd4a7bf9d978c2426f"} Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.425542 4710 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="811d09f1-53e2-4617-8c12-62448ef1f729" podUID="b7779e6d-a6fb-4d03-8636-0dafb2767cbc" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.452736 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.557083 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config\") pod \"811d09f1-53e2-4617-8c12-62448ef1f729\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.557211 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret\") pod \"811d09f1-53e2-4617-8c12-62448ef1f729\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.557381 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-722rn\" (UniqueName: \"kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn\") pod \"811d09f1-53e2-4617-8c12-62448ef1f729\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.557405 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle\") pod \"811d09f1-53e2-4617-8c12-62448ef1f729\" (UID: \"811d09f1-53e2-4617-8c12-62448ef1f729\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.563268 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "811d09f1-53e2-4617-8c12-62448ef1f729" (UID: "811d09f1-53e2-4617-8c12-62448ef1f729"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.563941 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "811d09f1-53e2-4617-8c12-62448ef1f729" (UID: "811d09f1-53e2-4617-8c12-62448ef1f729"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.566603 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn" (OuterVolumeSpecName: "kube-api-access-722rn") pod "811d09f1-53e2-4617-8c12-62448ef1f729" (UID: "811d09f1-53e2-4617-8c12-62448ef1f729"). 
InnerVolumeSpecName "kube-api-access-722rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.571620 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "811d09f1-53e2-4617-8c12-62448ef1f729" (UID: "811d09f1-53e2-4617-8c12-62448ef1f729"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.660209 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-722rn\" (UniqueName: \"kubernetes.io/projected/811d09f1-53e2-4617-8c12-62448ef1f729-kube-api-access-722rn\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.660255 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.660267 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.660276 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/811d09f1-53e2-4617-8c12-62448ef1f729-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.777808 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.823766 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="811d09f1-53e2-4617-8c12-62448ef1f729" path="/var/lib/kubelet/pods/811d09f1-53e2-4617-8c12-62448ef1f729/volumes" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.862827 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863046 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863140 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863167 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t9mg\" (UniqueName: \"kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863195 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863225 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id\") pod \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\" (UID: \"c9e9fec6-00af-46a7-9a1f-a59b6b06969c\") " Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.863692 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.874229 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.877383 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts" (OuterVolumeSpecName: "scripts") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.887610 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg" (OuterVolumeSpecName: "kube-api-access-8t9mg") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "kube-api-access-8t9mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.906537 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.916619 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data" (OuterVolumeSpecName: "config-data") pod "c9e9fec6-00af-46a7-9a1f-a59b6b06969c" (UID: "c9e9fec6-00af-46a7-9a1f-a59b6b06969c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965040 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965067 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965077 4710 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965086 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t9mg\" (UniqueName: \"kubernetes.io/projected/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-kube-api-access-8t9mg\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965095 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:06 crc kubenswrapper[4710]: I1009 09:21:06.965103 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c9e9fec6-00af-46a7-9a1f-a59b6b06969c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.453850 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerStarted","Data":"004f64e5aadafecc3ec70724ac839508345026caa919e629e1ee82fadd547eb3"} Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.456230 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.456342 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rmqx2" event={"ID":"c9e9fec6-00af-46a7-9a1f-a59b6b06969c","Type":"ContainerDied","Data":"95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65"} Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.456413 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95e76f0736539dd373fb68b94a1b331e5b4e6458c78f063746f9a0c55c29fe65" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.456411 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rmqx2" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.482835 4710 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="811d09f1-53e2-4617-8c12-62448ef1f729" podUID="b7779e6d-a6fb-4d03-8636-0dafb2767cbc" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.674964 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.680703 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:07 crc kubenswrapper[4710]: E1009 09:21:07.681125 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" containerName="cinder-db-sync" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.681187 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" containerName="cinder-db-sync" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.681424 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" containerName="cinder-db-sync" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.682358 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.690852 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zkcnb" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.691112 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.695526 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.701904 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.730581 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.802931 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.802979 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.803088 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.803118 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.803317 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps5tz\" (UniqueName: \"kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.803349 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.820951 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.831143 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.838398 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906358 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906516 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906620 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jdfx\" (UniqueName: \"kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906693 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps5tz\" (UniqueName: \"kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906717 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906739 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906774 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906799 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906814 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906859 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.906984 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.913588 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.924971 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.929095 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.930308 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:07 crc kubenswrapper[4710]: I1009 09:21:07.933425 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps5tz\" (UniqueName: \"kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz\") pod \"cinder-scheduler-0\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.010507 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.011996 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.014163 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jdfx\" (UniqueName: \"kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.014336 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.019550 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.020018 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.014607 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.019458 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.020547 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.020810 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.020833 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.021608 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.028739 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.037051 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.041387 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jdfx\" (UniqueName: \"kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx\") pod \"dnsmasq-dns-7bdc9d6cdc-lv29t\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.125881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126246 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126343 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d9m8\" (UniqueName: \"kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8\") pod \"cinder-api-0\" (UID: 
\"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126448 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126482 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126574 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.126674 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.156848 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228361 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228413 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228481 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228547 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228627 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228657 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.228684 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d9m8\" (UniqueName: \"kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.230213 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.243462 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.245984 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.258158 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.260211 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.268849 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d9m8\" (UniqueName: \"kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.282524 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom\") pod \"cinder-api-0\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.339412 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.356575 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-78c6b597d-zsdxm" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.149:9311/healthcheck\": read tcp 10.217.0.2:40714->10.217.0.149:9311: read: connection reset by peer" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.356707 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-78c6b597d-zsdxm" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.149:9311/healthcheck\": read tcp 10.217.0.2:40706->10.217.0.149:9311: read: connection reset by peer" Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.516193 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerStarted","Data":"e6f848fb347aceb32465798d63489338935dede41d3adb94909b15db6c7a1923"} Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.521466 4710 generic.go:334] "Generic (PLEG): container finished" podID="ad719353-0773-432e-921f-a1480314f1c2" containerID="ea1fd699d6d3b22a5512d4e8c55da578c00f8e02823b707b8341a3423bccd604" exitCode=0 Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.521519 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerDied","Data":"ea1fd699d6d3b22a5512d4e8c55da578c00f8e02823b707b8341a3423bccd604"} Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.704407 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:08 crc kubenswrapper[4710]: I1009 09:21:08.785573 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.085507 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.166929 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom\") pod \"ad719353-0773-432e-921f-a1480314f1c2\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.167081 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data\") pod \"ad719353-0773-432e-921f-a1480314f1c2\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.167176 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs\") pod \"ad719353-0773-432e-921f-a1480314f1c2\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.167730 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6plx\" (UniqueName: \"kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx\") pod \"ad719353-0773-432e-921f-a1480314f1c2\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.167989 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle\") pod \"ad719353-0773-432e-921f-a1480314f1c2\" (UID: \"ad719353-0773-432e-921f-a1480314f1c2\") " Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.169821 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs" (OuterVolumeSpecName: "logs") pod "ad719353-0773-432e-921f-a1480314f1c2" (UID: "ad719353-0773-432e-921f-a1480314f1c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.175583 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx" (OuterVolumeSpecName: "kube-api-access-p6plx") pod "ad719353-0773-432e-921f-a1480314f1c2" (UID: "ad719353-0773-432e-921f-a1480314f1c2"). InnerVolumeSpecName "kube-api-access-p6plx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.193635 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ad719353-0773-432e-921f-a1480314f1c2" (UID: "ad719353-0773-432e-921f-a1480314f1c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.232772 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad719353-0773-432e-921f-a1480314f1c2" (UID: "ad719353-0773-432e-921f-a1480314f1c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.241421 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.259831 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data" (OuterVolumeSpecName: "config-data") pod "ad719353-0773-432e-921f-a1480314f1c2" (UID: "ad719353-0773-432e-921f-a1480314f1c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.271007 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.271047 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.271058 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad719353-0773-432e-921f-a1480314f1c2-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.271068 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad719353-0773-432e-921f-a1480314f1c2-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.271078 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6plx\" (UniqueName: \"kubernetes.io/projected/ad719353-0773-432e-921f-a1480314f1c2-kube-api-access-p6plx\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.560748 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerStarted","Data":"ce88f352c23830065c9099caab2ea18ad9a212e7ff58fbffe0c4ee36f35db1b9"} Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.593637 4710 generic.go:334] "Generic (PLEG): container finished" podID="f6159738-132a-43a9-a072-4925e12092b1" containerID="2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe" exitCode=0 Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.593767 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" event={"ID":"f6159738-132a-43a9-a072-4925e12092b1","Type":"ContainerDied","Data":"2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe"} Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.593804 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" event={"ID":"f6159738-132a-43a9-a072-4925e12092b1","Type":"ContainerStarted","Data":"700ace9e83b873378e72b2aea8136eb48850f72a18fc8950546e09b0e6f9b2b5"} Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.607935 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerStarted","Data":"88c4449e0688b20cecb8d7d8acf8f2ccb3a387f7940cfc7ed0e3840b82454cfc"} Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.620001 4710 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/neutron-bf8dfcdb5-zd6wv" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.637119 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-78c6b597d-zsdxm" event={"ID":"ad719353-0773-432e-921f-a1480314f1c2","Type":"ContainerDied","Data":"737ee9a3d8e275b67d820f5baf0a42dc0ca60a7142aefdc35c61f5b7b7a5e6e4"} Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.637176 4710 scope.go:117] "RemoveContainer" containerID="ea1fd699d6d3b22a5512d4e8c55da578c00f8e02823b707b8341a3423bccd604" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.637371 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-78c6b597d-zsdxm" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.739549 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.739813 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-655f9d4b56-jw9nr" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-api" containerID="cri-o://a3d876af5f44c7bc1a90e562a2f6dcf6cfbc967a24db943267055bc38f2678d6" gracePeriod=30 Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.740179 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-655f9d4b56-jw9nr" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-httpd" containerID="cri-o://8f194ab7b3d2a496c0beb362af7650aa9d4b2bca4ff553742b37aee8dbc32e28" gracePeriod=30 Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.757288 4710 scope.go:117] "RemoveContainer" containerID="b9b577012a27a5f084d8b20bda3e97fff33b6983215e963f3fd3596d4bc021aa" Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.804052 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:21:09 crc kubenswrapper[4710]: I1009 09:21:09.815262 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-78c6b597d-zsdxm"] Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.632014 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.702331 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerStarted","Data":"9b0b25cd1e120f846ed0aa6630a4bcd58d7b453a5a690457dd501c0e5e655873"} Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.703447 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.717602 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerStarted","Data":"0e5de8ce6910add5431d5c339b711b51d4adef07885ec1dfbc1da72c1cca79f7"} Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.730227 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.339247681 podStartE2EDuration="7.730214491s" podCreationTimestamp="2025-10-09 09:21:03 +0000 UTC" firstStartedPulling="2025-10-09 09:21:04.225894011 +0000 UTC m=+987.716002408" lastFinishedPulling="2025-10-09 09:21:09.61686082 +0000 UTC m=+993.106969218" observedRunningTime="2025-10-09 09:21:10.728473988 +0000 UTC 
m=+994.218582385" watchObservedRunningTime="2025-10-09 09:21:10.730214491 +0000 UTC m=+994.220322887" Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.734593 4710 generic.go:334] "Generic (PLEG): container finished" podID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerID="8f194ab7b3d2a496c0beb362af7650aa9d4b2bca4ff553742b37aee8dbc32e28" exitCode=0 Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.734681 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerDied","Data":"8f194ab7b3d2a496c0beb362af7650aa9d4b2bca4ff553742b37aee8dbc32e28"} Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.750347 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" event={"ID":"f6159738-132a-43a9-a072-4925e12092b1","Type":"ContainerStarted","Data":"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4"} Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.751657 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.772817 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" podStartSLOduration=3.77280258 podStartE2EDuration="3.77280258s" podCreationTimestamp="2025-10-09 09:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:21:10.770357369 +0000 UTC m=+994.260465766" watchObservedRunningTime="2025-10-09 09:21:10.77280258 +0000 UTC m=+994.262910977" Oct 09 09:21:10 crc kubenswrapper[4710]: I1009 09:21:10.848085 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad719353-0773-432e-921f-a1480314f1c2" path="/var/lib/kubelet/pods/ad719353-0773-432e-921f-a1480314f1c2/volumes" Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.784118 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerStarted","Data":"c32db9a05dbfb1d77905d678d4e00d6a1e85437a9694f70c226b7d1933f1d1cd"} Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.790380 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.784776 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api" containerID="cri-o://c32db9a05dbfb1d77905d678d4e00d6a1e85437a9694f70c226b7d1933f1d1cd" gracePeriod=30 Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.784516 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api-log" containerID="cri-o://0e5de8ce6910add5431d5c339b711b51d4adef07885ec1dfbc1da72c1cca79f7" gracePeriod=30 Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.805983 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerStarted","Data":"27e79067019b49a428b81cb4379080eee1f9832098082547c3ee497a46fe9743"} Oct 09 09:21:11 crc kubenswrapper[4710]: I1009 09:21:11.815599 4710 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/cinder-api-0" podStartSLOduration=4.815579642 podStartE2EDuration="4.815579642s" podCreationTimestamp="2025-10-09 09:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:21:11.799870369 +0000 UTC m=+995.289978766" watchObservedRunningTime="2025-10-09 09:21:11.815579642 +0000 UTC m=+995.305688039" Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.833695 4710 generic.go:334] "Generic (PLEG): container finished" podID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerID="0e5de8ce6910add5431d5c339b711b51d4adef07885ec1dfbc1da72c1cca79f7" exitCode=143 Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.834218 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerDied","Data":"0e5de8ce6910add5431d5c339b711b51d4adef07885ec1dfbc1da72c1cca79f7"} Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.836302 4710 generic.go:334] "Generic (PLEG): container finished" podID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerID="a3d876af5f44c7bc1a90e562a2f6dcf6cfbc967a24db943267055bc38f2678d6" exitCode=0 Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.836350 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerDied","Data":"a3d876af5f44c7bc1a90e562a2f6dcf6cfbc967a24db943267055bc38f2678d6"} Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.837571 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerStarted","Data":"9e6abe0c4f3d6476fabe1a2f7ed327b4b69c99dd21ddfbf5e2c603ff72f975b3"} Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.860574 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.403884427 podStartE2EDuration="5.860563325s" podCreationTimestamp="2025-10-09 09:21:07 +0000 UTC" firstStartedPulling="2025-10-09 09:21:08.80034272 +0000 UTC m=+992.290451117" lastFinishedPulling="2025-10-09 09:21:10.257021628 +0000 UTC m=+993.747130015" observedRunningTime="2025-10-09 09:21:12.856760604 +0000 UTC m=+996.346869001" watchObservedRunningTime="2025-10-09 09:21:12.860563325 +0000 UTC m=+996.350671722" Oct 09 09:21:12 crc kubenswrapper[4710]: I1009 09:21:12.925551 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.019860 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.118823 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config\") pod \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.118866 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs\") pod \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.118947 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config\") pod \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.118982 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd9jb\" (UniqueName: \"kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb\") pod \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.119089 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle\") pod \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\" (UID: \"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac\") " Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.137594 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" (UID: "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.142925 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb" (OuterVolumeSpecName: "kube-api-access-kd9jb") pod "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" (UID: "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac"). InnerVolumeSpecName "kube-api-access-kd9jb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.166798 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" (UID: "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.187600 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config" (OuterVolumeSpecName: "config") pod "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" (UID: "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.220678 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" (UID: "a9a88de3-88a2-4f91-8c5a-cafb299cd9ac"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.222229 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.222269 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd9jb\" (UniqueName: \"kubernetes.io/projected/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-kube-api-access-kd9jb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.222280 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.222293 4710 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.222303 4710 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.446688 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-6dwxx"] Oct 09 09:21:13 crc kubenswrapper[4710]: E1009 09:21:13.447235 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-api" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447253 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-api" Oct 09 09:21:13 crc kubenswrapper[4710]: E1009 09:21:13.447289 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api-log" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447295 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api-log" Oct 09 09:21:13 crc kubenswrapper[4710]: E1009 09:21:13.447304 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447310 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad719353-0773-432e-921f-a1480314f1c2" 
containerName="barbican-api" Oct 09 09:21:13 crc kubenswrapper[4710]: E1009 09:21:13.447320 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-httpd" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447327 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-httpd" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447525 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-api" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447543 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447552 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad719353-0773-432e-921f-a1480314f1c2" containerName="barbican-api-log" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.447562 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" containerName="neutron-httpd" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.448092 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.463235 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6dwxx"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.527113 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgr24\" (UniqueName: \"kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24\") pod \"nova-api-db-create-6dwxx\" (UID: \"ba8ecfdc-58cc-457e-971c-13583f94d4ee\") " pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.570819 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-jkdrz"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.572099 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.588110 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jkdrz"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.635115 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgr24\" (UniqueName: \"kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24\") pod \"nova-api-db-create-6dwxx\" (UID: \"ba8ecfdc-58cc-457e-971c-13583f94d4ee\") " pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.635214 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hk87\" (UniqueName: \"kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87\") pod \"nova-cell0-db-create-jkdrz\" (UID: \"03740f1c-82bd-434e-9544-be5dc802648a\") " pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.659797 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-skpfw"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.661958 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.671465 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-skpfw"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.674687 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgr24\" (UniqueName: \"kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24\") pod \"nova-api-db-create-6dwxx\" (UID: \"ba8ecfdc-58cc-457e-971c-13583f94d4ee\") " pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.738444 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hk87\" (UniqueName: \"kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87\") pod \"nova-cell0-db-create-jkdrz\" (UID: \"03740f1c-82bd-434e-9544-be5dc802648a\") " pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.764732 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.774548 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hk87\" (UniqueName: \"kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87\") pod \"nova-cell0-db-create-jkdrz\" (UID: \"03740f1c-82bd-434e-9544-be5dc802648a\") " pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.841504 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vkfn\" (UniqueName: \"kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn\") pod \"nova-cell1-db-create-skpfw\" (UID: \"311d9086-0967-491c-91c7-0c783685fb7e\") " pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.863538 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-655f9d4b56-jw9nr" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.863684 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-655f9d4b56-jw9nr" event={"ID":"a9a88de3-88a2-4f91-8c5a-cafb299cd9ac","Type":"ContainerDied","Data":"d671a8139c3e7f885b6236bcb926f98cd4c2022ae1b417292960d9de05ff628d"} Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.863732 4710 scope.go:117] "RemoveContainer" containerID="8f194ab7b3d2a496c0beb362af7650aa9d4b2bca4ff553742b37aee8dbc32e28" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.906116 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.915758 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-655f9d4b56-jw9nr"] Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.921694 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.929807 4710 scope.go:117] "RemoveContainer" containerID="a3d876af5f44c7bc1a90e562a2f6dcf6cfbc967a24db943267055bc38f2678d6" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.944969 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vkfn\" (UniqueName: \"kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn\") pod \"nova-cell1-db-create-skpfw\" (UID: \"311d9086-0967-491c-91c7-0c783685fb7e\") " pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:13 crc kubenswrapper[4710]: I1009 09:21:13.968805 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vkfn\" (UniqueName: \"kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn\") pod \"nova-cell1-db-create-skpfw\" (UID: \"311d9086-0967-491c-91c7-0c783685fb7e\") " pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.047946 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.266405 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-6dwxx"] Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.473878 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-jkdrz"] Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.659768 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-skpfw"] Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.826033 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a88de3-88a2-4f91-8c5a-cafb299cd9ac" path="/var/lib/kubelet/pods/a9a88de3-88a2-4f91-8c5a-cafb299cd9ac/volumes" Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.878256 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-skpfw" event={"ID":"311d9086-0967-491c-91c7-0c783685fb7e","Type":"ContainerStarted","Data":"90582c788621da20e9aa181d10f9754e84bfa44b08cf60e60e735f5600a56e55"} Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.884107 4710 generic.go:334] "Generic (PLEG): container finished" podID="03740f1c-82bd-434e-9544-be5dc802648a" containerID="4c1c2a09b8b4455cee8c59949cdc410f77b86eb9216a93b95aa5d9cde740deb4" exitCode=0 Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.884166 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jkdrz" event={"ID":"03740f1c-82bd-434e-9544-be5dc802648a","Type":"ContainerDied","Data":"4c1c2a09b8b4455cee8c59949cdc410f77b86eb9216a93b95aa5d9cde740deb4"} Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.884193 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jkdrz" event={"ID":"03740f1c-82bd-434e-9544-be5dc802648a","Type":"ContainerStarted","Data":"35b91af0b5ac3b8ce63a5aaf5986a3d440442c33c8d20b08d9fa40e2a00a1062"} Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.889551 4710 generic.go:334] "Generic (PLEG): container finished" podID="ba8ecfdc-58cc-457e-971c-13583f94d4ee" containerID="7503d9c57c6aa5494069872083154dca9a14d3285548af2e64fd831ff833c049" exitCode=0 Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.890067 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6dwxx" event={"ID":"ba8ecfdc-58cc-457e-971c-13583f94d4ee","Type":"ContainerDied","Data":"7503d9c57c6aa5494069872083154dca9a14d3285548af2e64fd831ff833c049"} Oct 09 09:21:14 crc kubenswrapper[4710]: I1009 09:21:14.890095 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6dwxx" event={"ID":"ba8ecfdc-58cc-457e-971c-13583f94d4ee","Type":"ContainerStarted","Data":"0334cb6d86f2e0f931d9452fa8877fb10489d7ecffd4bf9f67781fefb61cee5b"} Oct 09 09:21:15 crc kubenswrapper[4710]: I1009 09:21:15.901316 4710 generic.go:334] "Generic (PLEG): container finished" podID="311d9086-0967-491c-91c7-0c783685fb7e" containerID="9b2664b53678277fb17e0f0e1fff6d1cecbd394d9d162f68ba0eeb7c6487b88c" exitCode=0 Oct 09 09:21:15 crc kubenswrapper[4710]: I1009 09:21:15.901455 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-skpfw" event={"ID":"311d9086-0967-491c-91c7-0c783685fb7e","Type":"ContainerDied","Data":"9b2664b53678277fb17e0f0e1fff6d1cecbd394d9d162f68ba0eeb7c6487b88c"} Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.307716 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.314657 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.334288 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hk87\" (UniqueName: \"kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87\") pod \"03740f1c-82bd-434e-9544-be5dc802648a\" (UID: \"03740f1c-82bd-434e-9544-be5dc802648a\") " Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.334793 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgr24\" (UniqueName: \"kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24\") pod \"ba8ecfdc-58cc-457e-971c-13583f94d4ee\" (UID: \"ba8ecfdc-58cc-457e-971c-13583f94d4ee\") " Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.356169 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24" (OuterVolumeSpecName: "kube-api-access-rgr24") pod "ba8ecfdc-58cc-457e-971c-13583f94d4ee" (UID: "ba8ecfdc-58cc-457e-971c-13583f94d4ee"). InnerVolumeSpecName "kube-api-access-rgr24". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.356512 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87" (OuterVolumeSpecName: "kube-api-access-6hk87") pod "03740f1c-82bd-434e-9544-be5dc802648a" (UID: "03740f1c-82bd-434e-9544-be5dc802648a"). InnerVolumeSpecName "kube-api-access-6hk87". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.445281 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgr24\" (UniqueName: \"kubernetes.io/projected/ba8ecfdc-58cc-457e-971c-13583f94d4ee-kube-api-access-rgr24\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.445325 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hk87\" (UniqueName: \"kubernetes.io/projected/03740f1c-82bd-434e-9544-be5dc802648a-kube-api-access-6hk87\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.912170 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-jkdrz" event={"ID":"03740f1c-82bd-434e-9544-be5dc802648a","Type":"ContainerDied","Data":"35b91af0b5ac3b8ce63a5aaf5986a3d440442c33c8d20b08d9fa40e2a00a1062"} Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.912214 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35b91af0b5ac3b8ce63a5aaf5986a3d440442c33c8d20b08d9fa40e2a00a1062" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.912283 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-jkdrz" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.925208 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-6dwxx" Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.925411 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-6dwxx" event={"ID":"ba8ecfdc-58cc-457e-971c-13583f94d4ee","Type":"ContainerDied","Data":"0334cb6d86f2e0f931d9452fa8877fb10489d7ecffd4bf9f67781fefb61cee5b"} Oct 09 09:21:16 crc kubenswrapper[4710]: I1009 09:21:16.925474 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0334cb6d86f2e0f931d9452fa8877fb10489d7ecffd4bf9f67781fefb61cee5b" Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.158971 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.210831 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.211209 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="dnsmasq-dns" containerID="cri-o://a7b05d7dd1c08ff109fc3de6efee93c90bc3904a19ca8249e59899f5a4e1f924" gracePeriod=10 Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.323780 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.361281 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.951388 4710 generic.go:334] "Generic (PLEG): container finished" podID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerID="a7b05d7dd1c08ff109fc3de6efee93c90bc3904a19ca8249e59899f5a4e1f924" exitCode=0 Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.951939 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="cinder-scheduler" containerID="cri-o://27e79067019b49a428b81cb4379080eee1f9832098082547c3ee497a46fe9743" gracePeriod=30 Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.951629 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerDied","Data":"a7b05d7dd1c08ff109fc3de6efee93c90bc3904a19ca8249e59899f5a4e1f924"} Oct 09 09:21:18 crc kubenswrapper[4710]: I1009 09:21:18.952409 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="probe" containerID="cri-o://9e6abe0c4f3d6476fabe1a2f7ed327b4b69c99dd21ddfbf5e2c603ff72f975b3" gracePeriod=30 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.484386 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.484706 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="proxy-httpd" containerID="cri-o://9b0b25cd1e120f846ed0aa6630a4bcd58d7b453a5a690457dd501c0e5e655873" gracePeriod=30 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.484788 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="sg-core" containerID="cri-o://e6f848fb347aceb32465798d63489338935dede41d3adb94909b15db6c7a1923" gracePeriod=30 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.484833 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-notification-agent" containerID="cri-o://004f64e5aadafecc3ec70724ac839508345026caa919e629e1ee82fadd547eb3" gracePeriod=30 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.484942 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-central-agent" containerID="cri-o://2dacd45e9146ad05466bc69726fd4c7841a6ed80df689390968d80b917787840" gracePeriod=30 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967378 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerID="9b0b25cd1e120f846ed0aa6630a4bcd58d7b453a5a690457dd501c0e5e655873" exitCode=0 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967732 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerID="e6f848fb347aceb32465798d63489338935dede41d3adb94909b15db6c7a1923" exitCode=2 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967744 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerID="2dacd45e9146ad05466bc69726fd4c7841a6ed80df689390968d80b917787840" exitCode=0 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967602 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerDied","Data":"9b0b25cd1e120f846ed0aa6630a4bcd58d7b453a5a690457dd501c0e5e655873"} Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967818 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerDied","Data":"e6f848fb347aceb32465798d63489338935dede41d3adb94909b15db6c7a1923"} Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.967841 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerDied","Data":"2dacd45e9146ad05466bc69726fd4c7841a6ed80df689390968d80b917787840"} Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.971900 4710 generic.go:334] "Generic (PLEG): container finished" podID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerID="9e6abe0c4f3d6476fabe1a2f7ed327b4b69c99dd21ddfbf5e2c603ff72f975b3" exitCode=0 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.971922 4710 generic.go:334] "Generic (PLEG): container finished" podID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerID="27e79067019b49a428b81cb4379080eee1f9832098082547c3ee497a46fe9743" exitCode=0 Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.971954 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerDied","Data":"9e6abe0c4f3d6476fabe1a2f7ed327b4b69c99dd21ddfbf5e2c603ff72f975b3"} Oct 09 09:21:19 crc kubenswrapper[4710]: I1009 09:21:19.971973 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerDied","Data":"27e79067019b49a428b81cb4379080eee1f9832098082547c3ee497a46fe9743"} Oct 09 09:21:20 crc kubenswrapper[4710]: I1009 09:21:20.496237 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 09 09:21:20 crc kubenswrapper[4710]: I1009 09:21:20.779490 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.148:5353: connect: connection refused" Oct 09 09:21:21 crc kubenswrapper[4710]: I1009 09:21:21.991062 4710 generic.go:334] "Generic (PLEG): container finished" podID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerID="004f64e5aadafecc3ec70724ac839508345026caa919e629e1ee82fadd547eb3" exitCode=0 Oct 09 09:21:21 crc kubenswrapper[4710]: I1009 09:21:21.991399 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerDied","Data":"004f64e5aadafecc3ec70724ac839508345026caa919e629e1ee82fadd547eb3"} Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.015084 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-skpfw" event={"ID":"311d9086-0967-491c-91c7-0c783685fb7e","Type":"ContainerDied","Data":"90582c788621da20e9aa181d10f9754e84bfa44b08cf60e60e735f5600a56e55"} Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.015130 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90582c788621da20e9aa181d10f9754e84bfa44b08cf60e60e735f5600a56e55" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.104358 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.208936 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vkfn\" (UniqueName: \"kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn\") pod \"311d9086-0967-491c-91c7-0c783685fb7e\" (UID: \"311d9086-0967-491c-91c7-0c783685fb7e\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.219015 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn" (OuterVolumeSpecName: "kube-api-access-6vkfn") pod "311d9086-0967-491c-91c7-0c783685fb7e" (UID: "311d9086-0967-491c-91c7-0c783685fb7e"). InnerVolumeSpecName "kube-api-access-6vkfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.299681 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.312638 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vkfn\" (UniqueName: \"kubernetes.io/projected/311d9086-0967-491c-91c7-0c783685fb7e-kube-api-access-6vkfn\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.414988 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mpqh\" (UniqueName: \"kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh\") pod \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.415038 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config\") pod \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.415081 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb\") pod \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.415107 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc\") pod \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.415311 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb\") pod \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\" (UID: \"86ca38b0-140d-419f-9a96-6c9b3825a1ea\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.452639 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh" (OuterVolumeSpecName: "kube-api-access-2mpqh") pod "86ca38b0-140d-419f-9a96-6c9b3825a1ea" (UID: "86ca38b0-140d-419f-9a96-6c9b3825a1ea"). InnerVolumeSpecName "kube-api-access-2mpqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.474670 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "86ca38b0-140d-419f-9a96-6c9b3825a1ea" (UID: "86ca38b0-140d-419f-9a96-6c9b3825a1ea"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.488383 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "86ca38b0-140d-419f-9a96-6c9b3825a1ea" (UID: "86ca38b0-140d-419f-9a96-6c9b3825a1ea"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.498200 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "86ca38b0-140d-419f-9a96-6c9b3825a1ea" (UID: "86ca38b0-140d-419f-9a96-6c9b3825a1ea"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.519421 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mpqh\" (UniqueName: \"kubernetes.io/projected/86ca38b0-140d-419f-9a96-6c9b3825a1ea-kube-api-access-2mpqh\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.519465 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.519476 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.519484 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.531972 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.534316 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config" (OuterVolumeSpecName: "config") pod "86ca38b0-140d-419f-9a96-6c9b3825a1ea" (UID: "86ca38b0-140d-419f-9a96-6c9b3825a1ea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.580488 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620313 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620374 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620481 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620513 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620542 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620618 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620651 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620693 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620711 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps5tz\" (UniqueName: \"kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz\") pod \"b524d89d-aca1-473a-a8d5-ab9749fe1322\" (UID: \"b524d89d-aca1-473a-a8d5-ab9749fe1322\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620731 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djd78\" (UniqueName: \"kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: 
\"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620786 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620829 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.620864 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd\") pod \"2a6d7475-ed84-4a7c-986e-9052c176eac0\" (UID: \"2a6d7475-ed84-4a7c-986e-9052c176eac0\") " Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.621332 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86ca38b0-140d-419f-9a96-6c9b3825a1ea-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.622978 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.625706 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.626765 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts" (OuterVolumeSpecName: "scripts") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.630256 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts" (OuterVolumeSpecName: "scripts") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.630583 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.632398 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz" (OuterVolumeSpecName: "kube-api-access-ps5tz") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "kube-api-access-ps5tz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.634686 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78" (OuterVolumeSpecName: "kube-api-access-djd78") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "kube-api-access-djd78". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.675815 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-01f6-account-create-gk6mj"] Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676169 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="sg-core" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676183 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="sg-core" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676200 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="proxy-httpd" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676206 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="proxy-httpd" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676217 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="probe" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676223 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="probe" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676238 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="init" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676244 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="init" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676253 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="dnsmasq-dns" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676259 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="dnsmasq-dns" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676269 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03740f1c-82bd-434e-9544-be5dc802648a" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676285 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="03740f1c-82bd-434e-9544-be5dc802648a" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676297 4710 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-notification-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676302 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-notification-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676310 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba8ecfdc-58cc-457e-971c-13583f94d4ee" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676315 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba8ecfdc-58cc-457e-971c-13583f94d4ee" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676325 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-central-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676330 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-central-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676516 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="311d9086-0967-491c-91c7-0c783685fb7e" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676545 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="311d9086-0967-491c-91c7-0c783685fb7e" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: E1009 09:21:23.676557 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="cinder-scheduler" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676564 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="cinder-scheduler" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676733 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-central-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676744 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="ceilometer-notification-agent" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676761 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="sg-core" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676767 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="311d9086-0967-491c-91c7-0c783685fb7e" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676777 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="probe" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676786 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" containerName="proxy-httpd" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676793 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="03740f1c-82bd-434e-9544-be5dc802648a" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676799 4710 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="ba8ecfdc-58cc-457e-971c-13583f94d4ee" containerName="mariadb-database-create" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676807 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" containerName="cinder-scheduler" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.676815 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" containerName="dnsmasq-dns" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.677391 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.679322 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.682546 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.689312 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-01f6-account-create-gk6mj"] Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.712116 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.720658 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.722915 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncbxp\" (UniqueName: \"kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp\") pod \"nova-api-01f6-account-create-gk6mj\" (UID: \"283d4932-4284-4a3e-af2e-197c82a84fb9\") " pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723021 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723038 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723049 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723058 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b524d89d-aca1-473a-a8d5-ab9749fe1322-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723067 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723075 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps5tz\" (UniqueName: \"kubernetes.io/projected/b524d89d-aca1-473a-a8d5-ab9749fe1322-kube-api-access-ps5tz\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723086 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djd78\" (UniqueName: \"kubernetes.io/projected/2a6d7475-ed84-4a7c-986e-9052c176eac0-kube-api-access-djd78\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723096 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723103 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2a6d7475-ed84-4a7c-986e-9052c176eac0-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.723111 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.736724 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.752631 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data" (OuterVolumeSpecName: "config-data") pod "b524d89d-aca1-473a-a8d5-ab9749fe1322" (UID: "b524d89d-aca1-473a-a8d5-ab9749fe1322"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.765659 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data" (OuterVolumeSpecName: "config-data") pod "2a6d7475-ed84-4a7c-986e-9052c176eac0" (UID: "2a6d7475-ed84-4a7c-986e-9052c176eac0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.824982 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncbxp\" (UniqueName: \"kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp\") pod \"nova-api-01f6-account-create-gk6mj\" (UID: \"283d4932-4284-4a3e-af2e-197c82a84fb9\") " pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.825086 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.825101 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a6d7475-ed84-4a7c-986e-9052c176eac0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.825112 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b524d89d-aca1-473a-a8d5-ab9749fe1322-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.852775 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e773-account-create-99d7l"] Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.853914 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.856737 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.857958 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncbxp\" (UniqueName: \"kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp\") pod \"nova-api-01f6-account-create-gk6mj\" (UID: \"283d4932-4284-4a3e-af2e-197c82a84fb9\") " pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.870631 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e773-account-create-99d7l"] Oct 09 09:21:23 crc kubenswrapper[4710]: I1009 09:21:23.926694 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf257\" (UniqueName: \"kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257\") pod \"nova-cell0-e773-account-create-99d7l\" (UID: \"23e08cbb-62cb-4534-811f-dfce27d4c222\") " pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.010086 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.026025 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"2a6d7475-ed84-4a7c-986e-9052c176eac0","Type":"ContainerDied","Data":"bd44a810c477c63a594f47743efcc8c7e259b9cda93b04312278fb3605609769"} Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.026352 4710 scope.go:117] "RemoveContainer" containerID="9b0b25cd1e120f846ed0aa6630a4bcd58d7b453a5a690457dd501c0e5e655873" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.026533 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.028057 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf257\" (UniqueName: \"kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257\") pod \"nova-cell0-e773-account-create-99d7l\" (UID: \"23e08cbb-62cb-4534-811f-dfce27d4c222\") " pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.036036 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b524d89d-aca1-473a-a8d5-ab9749fe1322","Type":"ContainerDied","Data":"ce88f352c23830065c9099caab2ea18ad9a212e7ff58fbffe0c4ee36f35db1b9"} Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.036136 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.040126 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" event={"ID":"86ca38b0-140d-419f-9a96-6c9b3825a1ea","Type":"ContainerDied","Data":"d16b915ec888fb8eb81120578afb08b4001f398e40ae1a9540289647ff53b88b"} Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.040236 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.055648 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-skpfw" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.056087 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf257\" (UniqueName: \"kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257\") pod \"nova-cell0-e773-account-create-99d7l\" (UID: \"23e08cbb-62cb-4534-811f-dfce27d4c222\") " pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.056191 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"b7779e6d-a6fb-4d03-8636-0dafb2767cbc","Type":"ContainerStarted","Data":"e4bcadfaa49d5b88b8cc077b0cf92604d9882354d51cfc9128b39686bcc3b0db"} Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.099285 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.129209961 podStartE2EDuration="19.099245285s" podCreationTimestamp="2025-10-09 09:21:05 +0000 UTC" firstStartedPulling="2025-10-09 09:21:06.140523058 +0000 UTC m=+989.630631455" lastFinishedPulling="2025-10-09 09:21:23.110558382 +0000 UTC m=+1006.600666779" observedRunningTime="2025-10-09 09:21:24.068249585 +0000 UTC m=+1007.558357983" watchObservedRunningTime="2025-10-09 09:21:24.099245285 +0000 UTC m=+1007.589353682" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.142069 4710 scope.go:117] "RemoveContainer" containerID="e6f848fb347aceb32465798d63489338935dede41d3adb94909b15db6c7a1923" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.176600 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.226038 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.266508 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.267336 4710 scope.go:117] "RemoveContainer" containerID="004f64e5aadafecc3ec70724ac839508345026caa919e629e1ee82fadd547eb3" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.276836 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.278600 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.284516 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.289778 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.301835 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.322137 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.338473 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.348155 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5bdc4b9-vtdw9"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.353852 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.353903 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhf5b\" (UniqueName: \"kubernetes.io/projected/9883d700-efac-4450-81db-b1faf06dc645-kube-api-access-mhf5b\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.353957 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.353994 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-scripts\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.354027 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.354115 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9883d700-efac-4450-81db-b1faf06dc645-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.367509 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.369491 4710 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.375815 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.377922 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.378103 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.393599 4710 scope.go:117] "RemoveContainer" containerID="2dacd45e9146ad05466bc69726fd4c7841a6ed80df689390968d80b917787840" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.456838 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457209 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhf5b\" (UniqueName: \"kubernetes.io/projected/9883d700-efac-4450-81db-b1faf06dc645-kube-api-access-mhf5b\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457330 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457414 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457500 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457593 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457668 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457775 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-scripts\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457875 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.457967 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.458048 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfwrf\" (UniqueName: \"kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.458150 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9883d700-efac-4450-81db-b1faf06dc645-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.458237 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.461673 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9883d700-efac-4450-81db-b1faf06dc645-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.470953 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-scripts\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.474081 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.475062 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.479178 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9883d700-efac-4450-81db-b1faf06dc645-config-data\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.489810 4710 scope.go:117] "RemoveContainer" containerID="9e6abe0c4f3d6476fabe1a2f7ed327b4b69c99dd21ddfbf5e2c603ff72f975b3" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.500855 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhf5b\" (UniqueName: \"kubernetes.io/projected/9883d700-efac-4450-81db-b1faf06dc645-kube-api-access-mhf5b\") pod \"cinder-scheduler-0\" (UID: \"9883d700-efac-4450-81db-b1faf06dc645\") " pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.554189 4710 scope.go:117] "RemoveContainer" containerID="27e79067019b49a428b81cb4379080eee1f9832098082547c3ee497a46fe9743" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.563708 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.563878 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.563900 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.563919 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.563961 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.564036 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.564066 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfwrf\" (UniqueName: \"kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.564400 
4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.564777 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.573572 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.576611 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.583904 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.584168 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.602719 4710 scope.go:117] "RemoveContainer" containerID="a7b05d7dd1c08ff109fc3de6efee93c90bc3904a19ca8249e59899f5a4e1f924" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.606623 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfwrf\" (UniqueName: \"kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf\") pod \"ceilometer-0\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.639671 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.711822 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.773395 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-01f6-account-create-gk6mj"] Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.783536 4710 scope.go:117] "RemoveContainer" containerID="fb1060b3f95dacaaba49d3c09376861b1e78e4cf6463f9a4bd6428a39109951a" Oct 09 09:21:24 crc kubenswrapper[4710]: W1009 09:21:24.814034 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod283d4932_4284_4a3e_af2e_197c82a84fb9.slice/crio-8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7 WatchSource:0}: Error finding container 8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7: Status 404 returned error can't find the container with id 8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7 Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.824949 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a6d7475-ed84-4a7c-986e-9052c176eac0" path="/var/lib/kubelet/pods/2a6d7475-ed84-4a7c-986e-9052c176eac0/volumes" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.825728 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86ca38b0-140d-419f-9a96-6c9b3825a1ea" path="/var/lib/kubelet/pods/86ca38b0-140d-419f-9a96-6c9b3825a1ea/volumes" Oct 09 09:21:24 crc kubenswrapper[4710]: I1009 09:21:24.826254 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b524d89d-aca1-473a-a8d5-ab9749fe1322" path="/var/lib/kubelet/pods/b524d89d-aca1-473a-a8d5-ab9749fe1322/volumes" Oct 09 09:21:25 crc kubenswrapper[4710]: I1009 09:21:25.081309 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-01f6-account-create-gk6mj" event={"ID":"283d4932-4284-4a3e-af2e-197c82a84fb9","Type":"ContainerStarted","Data":"8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7"} Oct 09 09:21:25 crc kubenswrapper[4710]: I1009 09:21:25.146360 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e773-account-create-99d7l"] Oct 09 09:21:25 crc kubenswrapper[4710]: I1009 09:21:25.297804 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 09 09:21:25 crc kubenswrapper[4710]: I1009 09:21:25.511994 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.118084 4710 generic.go:334] "Generic (PLEG): container finished" podID="283d4932-4284-4a3e-af2e-197c82a84fb9" containerID="21294f6fa11199db134d7b7759e234647241c6c5b71de84a903f6e7cde0dadf7" exitCode=0 Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.118196 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-01f6-account-create-gk6mj" event={"ID":"283d4932-4284-4a3e-af2e-197c82a84fb9","Type":"ContainerDied","Data":"21294f6fa11199db134d7b7759e234647241c6c5b71de84a903f6e7cde0dadf7"} Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.120649 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerStarted","Data":"80c9588a8f20c31973179521db93dc158b2c8074ae03a956eac5761e8f80b037"} Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.123732 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"9883d700-efac-4450-81db-b1faf06dc645","Type":"ContainerStarted","Data":"fa1b45f314ffbefe57d70958f21f29548364e6ab6ccb1bd8a6d3d65fc22fe7e2"} Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.123791 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9883d700-efac-4450-81db-b1faf06dc645","Type":"ContainerStarted","Data":"db5dff666a827ec8d6348996c19fc3d25367cd41822fd3988f47c52d38a0f813"} Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.126076 4710 generic.go:334] "Generic (PLEG): container finished" podID="23e08cbb-62cb-4534-811f-dfce27d4c222" containerID="78135de6aabe93a9be1cc7ccb736691fe78ed289d1c61ae5fd216ed656211d52" exitCode=0 Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.126119 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e773-account-create-99d7l" event={"ID":"23e08cbb-62cb-4534-811f-dfce27d4c222","Type":"ContainerDied","Data":"78135de6aabe93a9be1cc7ccb736691fe78ed289d1c61ae5fd216ed656211d52"} Oct 09 09:21:26 crc kubenswrapper[4710]: I1009 09:21:26.126147 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e773-account-create-99d7l" event={"ID":"23e08cbb-62cb-4534-811f-dfce27d4c222","Type":"ContainerStarted","Data":"d21f53a9c6e81a7a5af400b57199a63f68fc9a3ef13c9719965d32038a31fb1a"} Oct 09 09:21:27 crc kubenswrapper[4710]: I1009 09:21:27.135743 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerStarted","Data":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} Oct 09 09:21:27 crc kubenswrapper[4710]: I1009 09:21:27.138508 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9883d700-efac-4450-81db-b1faf06dc645","Type":"ContainerStarted","Data":"dd6c60bfcf0810aabe09c60d5d1eb4e7db3e3ee741bab7ae1fc25dc80d1abe3b"} Oct 09 09:21:27 crc kubenswrapper[4710]: I1009 09:21:27.163511 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.163493359 podStartE2EDuration="3.163493359s" podCreationTimestamp="2025-10-09 09:21:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:21:27.161775229 +0000 UTC m=+1010.651883626" watchObservedRunningTime="2025-10-09 09:21:27.163493359 +0000 UTC m=+1010.653601755" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.320787 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.325805 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.472484 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncbxp\" (UniqueName: \"kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp\") pod \"283d4932-4284-4a3e-af2e-197c82a84fb9\" (UID: \"283d4932-4284-4a3e-af2e-197c82a84fb9\") " Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.472574 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf257\" (UniqueName: \"kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257\") pod \"23e08cbb-62cb-4534-811f-dfce27d4c222\" (UID: \"23e08cbb-62cb-4534-811f-dfce27d4c222\") " Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.478998 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257" (OuterVolumeSpecName: "kube-api-access-tf257") pod "23e08cbb-62cb-4534-811f-dfce27d4c222" (UID: "23e08cbb-62cb-4534-811f-dfce27d4c222"). InnerVolumeSpecName "kube-api-access-tf257". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.482651 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp" (OuterVolumeSpecName: "kube-api-access-ncbxp") pod "283d4932-4284-4a3e-af2e-197c82a84fb9" (UID: "283d4932-4284-4a3e-af2e-197c82a84fb9"). InnerVolumeSpecName "kube-api-access-ncbxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.575462 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncbxp\" (UniqueName: \"kubernetes.io/projected/283d4932-4284-4a3e-af2e-197c82a84fb9-kube-api-access-ncbxp\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:28 crc kubenswrapper[4710]: I1009 09:21:28.575776 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf257\" (UniqueName: \"kubernetes.io/projected/23e08cbb-62cb-4534-811f-dfce27d4c222-kube-api-access-tf257\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.157660 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-01f6-account-create-gk6mj" Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.158745 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-01f6-account-create-gk6mj" event={"ID":"283d4932-4284-4a3e-af2e-197c82a84fb9","Type":"ContainerDied","Data":"8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7"} Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.158780 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8393650c4d555105cc6b625010349c148349b1616ae9b5ce6de03824879468b7" Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.161098 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerStarted","Data":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.164755 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e773-account-create-99d7l" event={"ID":"23e08cbb-62cb-4534-811f-dfce27d4c222","Type":"ContainerDied","Data":"d21f53a9c6e81a7a5af400b57199a63f68fc9a3ef13c9719965d32038a31fb1a"} Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.164807 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d21f53a9c6e81a7a5af400b57199a63f68fc9a3ef13c9719965d32038a31fb1a" Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.164774 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e773-account-create-99d7l" Oct 09 09:21:29 crc kubenswrapper[4710]: I1009 09:21:29.639960 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 09 09:21:30 crc kubenswrapper[4710]: I1009 09:21:30.174970 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerStarted","Data":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} Oct 09 09:21:32 crc kubenswrapper[4710]: I1009 09:21:32.192470 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:32 crc kubenswrapper[4710]: I1009 09:21:32.192812 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerStarted","Data":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} Oct 09 09:21:32 crc kubenswrapper[4710]: I1009 09:21:32.192846 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:21:32 crc kubenswrapper[4710]: I1009 09:21:32.217264 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.441207856 podStartE2EDuration="8.217246334s" podCreationTimestamp="2025-10-09 09:21:24 +0000 UTC" firstStartedPulling="2025-10-09 09:21:25.54952712 +0000 UTC m=+1009.039635517" lastFinishedPulling="2025-10-09 09:21:31.325565598 +0000 UTC m=+1014.815673995" observedRunningTime="2025-10-09 09:21:32.209461434 +0000 UTC m=+1015.699569831" watchObservedRunningTime="2025-10-09 09:21:32.217246334 +0000 UTC m=+1015.707354731" Oct 09 09:21:33 crc kubenswrapper[4710]: I1009 09:21:33.200858 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2d3087e-1420-49bf-beee-96677d09996a" 
containerName="ceilometer-central-agent" containerID="cri-o://55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" gracePeriod=30 Oct 09 09:21:33 crc kubenswrapper[4710]: I1009 09:21:33.200953 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="sg-core" containerID="cri-o://e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" gracePeriod=30 Oct 09 09:21:33 crc kubenswrapper[4710]: I1009 09:21:33.200983 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="proxy-httpd" containerID="cri-o://86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" gracePeriod=30 Oct 09 09:21:33 crc kubenswrapper[4710]: I1009 09:21:33.201065 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-notification-agent" containerID="cri-o://85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" gracePeriod=30 Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.045036 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066271 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-4b56-account-create-658sj"] Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066657 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="sg-core" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066675 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="sg-core" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066689 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-central-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066695 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-central-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066708 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23e08cbb-62cb-4534-811f-dfce27d4c222" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066713 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="23e08cbb-62cb-4534-811f-dfce27d4c222" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066728 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283d4932-4284-4a3e-af2e-197c82a84fb9" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066734 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="283d4932-4284-4a3e-af2e-197c82a84fb9" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066744 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-notification-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066750 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d3087e-1420-49bf-beee-96677d09996a" 
containerName="ceilometer-notification-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.066764 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="proxy-httpd" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066769 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="proxy-httpd" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066932 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="23e08cbb-62cb-4534-811f-dfce27d4c222" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066947 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="sg-core" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066958 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="proxy-httpd" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066969 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-central-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066979 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="283d4932-4284-4a3e-af2e-197c82a84fb9" containerName="mariadb-account-create" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.066990 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d3087e-1420-49bf-beee-96677d09996a" containerName="ceilometer-notification-agent" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.067561 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.076707 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4b56-account-create-658sj"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.082716 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.200876 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.200949 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201055 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201095 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201116 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201168 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfwrf\" (UniqueName: \"kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201185 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml\") pod \"c2d3087e-1420-49bf-beee-96677d09996a\" (UID: \"c2d3087e-1420-49bf-beee-96677d09996a\") " Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201385 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201655 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxtrr\" (UniqueName: \"kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr\") pod \"nova-cell1-4b56-account-create-658sj\" (UID: \"9b77a877-c31e-4ede-831f-2dc85c37d957\") " pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201690 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201904 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.201917 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c2d3087e-1420-49bf-beee-96677d09996a-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.212356 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts" (OuterVolumeSpecName: "scripts") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.212464 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf" (OuterVolumeSpecName: "kube-api-access-lfwrf") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "kube-api-access-lfwrf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.221959 4710 generic.go:334] "Generic (PLEG): container finished" podID="c2d3087e-1420-49bf-beee-96677d09996a" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" exitCode=0 Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.221993 4710 generic.go:334] "Generic (PLEG): container finished" podID="c2d3087e-1420-49bf-beee-96677d09996a" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" exitCode=2 Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222000 4710 generic.go:334] "Generic (PLEG): container finished" podID="c2d3087e-1420-49bf-beee-96677d09996a" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" exitCode=0 Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222007 4710 generic.go:334] "Generic (PLEG): container finished" podID="c2d3087e-1420-49bf-beee-96677d09996a" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" exitCode=0 Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222029 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerDied","Data":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222062 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerDied","Data":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222073 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerDied","Data":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222081 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerDied","Data":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222090 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c2d3087e-1420-49bf-beee-96677d09996a","Type":"ContainerDied","Data":"80c9588a8f20c31973179521db93dc158b2c8074ae03a956eac5761e8f80b037"} Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222107 4710 scope.go:117] "RemoveContainer" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.222242 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.252175 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9l65"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.253341 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.258019 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-747m8" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.258332 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.263101 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.275660 4710 scope.go:117] "RemoveContainer" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.277819 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9l65"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.314493 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxtrr\" (UniqueName: \"kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr\") pod \"nova-cell1-4b56-account-create-658sj\" (UID: \"9b77a877-c31e-4ede-831f-2dc85c37d957\") " pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.314741 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.314758 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfwrf\" (UniqueName: \"kubernetes.io/projected/c2d3087e-1420-49bf-beee-96677d09996a-kube-api-access-lfwrf\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.354935 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxtrr\" (UniqueName: \"kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr\") pod \"nova-cell1-4b56-account-create-658sj\" (UID: \"9b77a877-c31e-4ede-831f-2dc85c37d957\") " pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.388864 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.407564 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.440612 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.440752 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.440969 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.441040 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5z7t\" (UniqueName: \"kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.441192 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.478265 4710 scope.go:117] "RemoveContainer" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.490647 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.517788 4710 scope.go:117] "RemoveContainer" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.519825 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data" (OuterVolumeSpecName: "config-data") pod "c2d3087e-1420-49bf-beee-96677d09996a" (UID: "c2d3087e-1420-49bf-beee-96677d09996a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.542749 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.542796 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5z7t\" (UniqueName: \"kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.542944 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.542999 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.543164 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.543177 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2d3087e-1420-49bf-beee-96677d09996a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.554277 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.555219 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.555251 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.580788 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5z7t\" (UniqueName: 
\"kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t\") pod \"nova-cell0-conductor-db-sync-r9l65\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.584056 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.589523 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.605221 4710 scope.go:117] "RemoveContainer" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.606335 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": container with ID starting with 86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3 not found: ID does not exist" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.606386 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} err="failed to get container status \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": rpc error: code = NotFound desc = could not find container \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": container with ID starting with 86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.606417 4710 scope.go:117] "RemoveContainer" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.610903 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.612563 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": container with ID starting with e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398 not found: ID does not exist" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.612600 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} err="failed to get container status \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": rpc error: code = NotFound desc = could not find container \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": container with ID starting with e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.612624 4710 scope.go:117] "RemoveContainer" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.613891 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": container with ID starting with 85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3 not found: ID does not exist" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.613916 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} err="failed to get container status \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": rpc error: code = NotFound desc = could not find container \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": container with ID starting with 85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.613934 4710 scope.go:117] "RemoveContainer" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: E1009 09:21:34.614605 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": container with ID starting with 55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae not found: ID does not exist" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.614629 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} err="failed to get container status \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": rpc error: code = NotFound desc = could not find container \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": container with ID starting with 55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.614643 4710 scope.go:117] "RemoveContainer" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.614873 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} err="failed to get container status \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": rpc error: code = NotFound desc = could not find container \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": container with ID starting with 86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.614926 4710 scope.go:117] "RemoveContainer" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.615163 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} err="failed to get container status \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": rpc error: code = NotFound desc = could not find container \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": container with ID 
starting with e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.615182 4710 scope.go:117] "RemoveContainer" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.616172 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} err="failed to get container status \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": rpc error: code = NotFound desc = could not find container \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": container with ID starting with 85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.616194 4710 scope.go:117] "RemoveContainer" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.616576 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} err="failed to get container status \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": rpc error: code = NotFound desc = could not find container \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": container with ID starting with 55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.616615 4710 scope.go:117] "RemoveContainer" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.619244 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} err="failed to get container status \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": rpc error: code = NotFound desc = could not find container \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": container with ID starting with 86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.619270 4710 scope.go:117] "RemoveContainer" containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.619390 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.621503 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.623078 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.625927 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.628374 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.630805 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} err="failed to get container status \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": rpc error: code = NotFound desc = could not find container \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": container with ID starting with e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.630827 4710 scope.go:117] "RemoveContainer" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.644641 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} err="failed to get container status \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": rpc error: code = NotFound desc = could not find container \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": container with ID starting with 85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.644669 4710 scope.go:117] "RemoveContainer" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.645721 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} err="failed to get container status \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": rpc error: code = NotFound desc = could not find container \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": container with ID starting with 55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.645767 4710 scope.go:117] "RemoveContainer" containerID="86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.657326 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3"} err="failed to get container status \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": rpc error: code = NotFound desc = could not find container \"86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3\": container with ID starting with 86c159ce99a5760ce4dd756e1182eb2720cf2660b84f06341ea6b6b2b33039a3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.657366 4710 scope.go:117] "RemoveContainer" 
containerID="e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.665552 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398"} err="failed to get container status \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": rpc error: code = NotFound desc = could not find container \"e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398\": container with ID starting with e6531ae24475d9fb9e7616b7f52e15ac71a5c3ab8726394b0a18c1f978416398 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.665586 4710 scope.go:117] "RemoveContainer" containerID="85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.668900 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3"} err="failed to get container status \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": rpc error: code = NotFound desc = could not find container \"85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3\": container with ID starting with 85452088da6265e3343288308e454491e4d467494ba52cc7615c49d569dbf9c3 not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.668929 4710 scope.go:117] "RemoveContainer" containerID="55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.672659 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae"} err="failed to get container status \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": rpc error: code = NotFound desc = could not find container \"55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae\": container with ID starting with 55fa8c2a6771ffc6cf101bb05985e848d6655ab3f893e41413c2ca9a7b6f70ae not found: ID does not exist" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.752414 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.752789 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.752931 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.752978 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.752998 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.753013 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.753042 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-688j8\" (UniqueName: \"kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.828132 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d3087e-1420-49bf-beee-96677d09996a" path="/var/lib/kubelet/pods/c2d3087e-1420-49bf-beee-96677d09996a/volumes" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855411 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855717 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855798 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855845 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855891 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855930 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-688j8\" (UniqueName: 
\"kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.855985 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.858961 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.859183 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.861203 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.861760 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.866909 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.868233 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.876965 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-688j8\" (UniqueName: \"kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8\") pod \"ceilometer-0\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.949885 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:21:34 crc kubenswrapper[4710]: I1009 09:21:34.993392 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-4b56-account-create-658sj"] Oct 09 09:21:35 crc kubenswrapper[4710]: W1009 09:21:35.012754 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b77a877_c31e_4ede_831f_2dc85c37d957.slice/crio-f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628 WatchSource:0}: Error finding container f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628: Status 404 returned error can't find the container with id f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628 Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.013241 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.166820 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9l65"] Oct 09 09:21:35 crc kubenswrapper[4710]: W1009 09:21:35.167269 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb71d13_b228_499f_b4ec_0fc78dfb5663.slice/crio-b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089 WatchSource:0}: Error finding container b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089: Status 404 returned error can't find the container with id b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089 Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.246098 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4b56-account-create-658sj" event={"ID":"9b77a877-c31e-4ede-831f-2dc85c37d957","Type":"ContainerStarted","Data":"f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628"} Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.247165 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9l65" event={"ID":"acb71d13-b228-499f-b4ec-0fc78dfb5663","Type":"ContainerStarted","Data":"b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089"} Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.504776 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:35 crc kubenswrapper[4710]: I1009 09:21:35.872728 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:21:36 crc kubenswrapper[4710]: I1009 09:21:36.257054 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerStarted","Data":"30a09d9e512b4b1100747f64ba7ff883509cad7067937a6bb895a38f321cce44"} Oct 09 09:21:36 crc kubenswrapper[4710]: I1009 09:21:36.259217 4710 generic.go:334] "Generic (PLEG): container finished" podID="9b77a877-c31e-4ede-831f-2dc85c37d957" containerID="e732afc665a465ed802c0efb27558808cb2189bd7dceb567a8c43a4cf281a248" exitCode=0 Oct 09 09:21:36 crc kubenswrapper[4710]: I1009 09:21:36.259259 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4b56-account-create-658sj" event={"ID":"9b77a877-c31e-4ede-831f-2dc85c37d957","Type":"ContainerDied","Data":"e732afc665a465ed802c0efb27558808cb2189bd7dceb567a8c43a4cf281a248"} Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.277939 4710 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerStarted","Data":"bad3c1788d92fad48d5cc7148074f32c85f8727f26176e1a6a0e7e3a547f5d2a"} Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.278361 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerStarted","Data":"675a500a8ea6b2b0fdce0a7ccd41685403b56bad5780cc7bf330dc770969b2fd"} Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.618738 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.815762 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxtrr\" (UniqueName: \"kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr\") pod \"9b77a877-c31e-4ede-831f-2dc85c37d957\" (UID: \"9b77a877-c31e-4ede-831f-2dc85c37d957\") " Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.819077 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr" (OuterVolumeSpecName: "kube-api-access-lxtrr") pod "9b77a877-c31e-4ede-831f-2dc85c37d957" (UID: "9b77a877-c31e-4ede-831f-2dc85c37d957"). InnerVolumeSpecName "kube-api-access-lxtrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:37 crc kubenswrapper[4710]: I1009 09:21:37.918421 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxtrr\" (UniqueName: \"kubernetes.io/projected/9b77a877-c31e-4ede-831f-2dc85c37d957-kube-api-access-lxtrr\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:38 crc kubenswrapper[4710]: I1009 09:21:38.300414 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerStarted","Data":"dd690eef57295d92a44f1b4ce81eca9d24d7b767a5ec639c25e8647811aad840"} Oct 09 09:21:38 crc kubenswrapper[4710]: I1009 09:21:38.307018 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-4b56-account-create-658sj" event={"ID":"9b77a877-c31e-4ede-831f-2dc85c37d957","Type":"ContainerDied","Data":"f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628"} Oct 09 09:21:38 crc kubenswrapper[4710]: I1009 09:21:38.307060 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8e0a09351bb48e38b24cf83c8090a9aa822243ad3cbc3cc3ef89ceccf0ca628" Oct 09 09:21:38 crc kubenswrapper[4710]: I1009 09:21:38.307119 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-4b56-account-create-658sj" Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.325544 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerStarted","Data":"241e33c60eba537e14168dc8a5e77ace8fc897fd729371f52ab6686ca82a6292"} Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.326174 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-central-agent" containerID="cri-o://675a500a8ea6b2b0fdce0a7ccd41685403b56bad5780cc7bf330dc770969b2fd" gracePeriod=30 Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.326486 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.326867 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="proxy-httpd" containerID="cri-o://241e33c60eba537e14168dc8a5e77ace8fc897fd729371f52ab6686ca82a6292" gracePeriod=30 Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.326946 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="sg-core" containerID="cri-o://dd690eef57295d92a44f1b4ce81eca9d24d7b767a5ec639c25e8647811aad840" gracePeriod=30 Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.327010 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-notification-agent" containerID="cri-o://bad3c1788d92fad48d5cc7148074f32c85f8727f26176e1a6a0e7e3a547f5d2a" gracePeriod=30 Oct 09 09:21:39 crc kubenswrapper[4710]: I1009 09:21:39.354578 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.791020814 podStartE2EDuration="5.354558322s" podCreationTimestamp="2025-10-09 09:21:34 +0000 UTC" firstStartedPulling="2025-10-09 09:21:35.520543048 +0000 UTC m=+1019.010651445" lastFinishedPulling="2025-10-09 09:21:39.084080555 +0000 UTC m=+1022.574188953" observedRunningTime="2025-10-09 09:21:39.351910389 +0000 UTC m=+1022.842018786" watchObservedRunningTime="2025-10-09 09:21:39.354558322 +0000 UTC m=+1022.844666720" Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345646 4710 generic.go:334] "Generic (PLEG): container finished" podID="72717529-ae4b-4785-bf30-18d13068cddd" containerID="dd690eef57295d92a44f1b4ce81eca9d24d7b767a5ec639c25e8647811aad840" exitCode=2 Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345678 4710 generic.go:334] "Generic (PLEG): container finished" podID="72717529-ae4b-4785-bf30-18d13068cddd" containerID="bad3c1788d92fad48d5cc7148074f32c85f8727f26176e1a6a0e7e3a547f5d2a" exitCode=0 Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345685 4710 generic.go:334] "Generic (PLEG): container finished" podID="72717529-ae4b-4785-bf30-18d13068cddd" containerID="675a500a8ea6b2b0fdce0a7ccd41685403b56bad5780cc7bf330dc770969b2fd" exitCode=0 Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345703 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerDied","Data":"dd690eef57295d92a44f1b4ce81eca9d24d7b767a5ec639c25e8647811aad840"} Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345728 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerDied","Data":"bad3c1788d92fad48d5cc7148074f32c85f8727f26176e1a6a0e7e3a547f5d2a"} Oct 09 09:21:40 crc kubenswrapper[4710]: I1009 09:21:40.345738 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerDied","Data":"675a500a8ea6b2b0fdce0a7ccd41685403b56bad5780cc7bf330dc770969b2fd"} Oct 09 09:21:42 crc kubenswrapper[4710]: I1009 09:21:42.363816 4710 generic.go:334] "Generic (PLEG): container finished" podID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerID="c32db9a05dbfb1d77905d678d4e00d6a1e85437a9694f70c226b7d1933f1d1cd" exitCode=137 Oct 09 09:21:42 crc kubenswrapper[4710]: I1009 09:21:42.364552 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerDied","Data":"c32db9a05dbfb1d77905d678d4e00d6a1e85437a9694f70c226b7d1933f1d1cd"} Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.485143 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534009 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d9m8\" (UniqueName: \"kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534096 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534222 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534245 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534267 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534395 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: 
\"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534415 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data\") pod \"97a2293b-fc2e-4333-b652-dcda4c009a30\" (UID: \"97a2293b-fc2e-4333-b652-dcda4c009a30\") " Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.534847 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.535101 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs" (OuterVolumeSpecName: "logs") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.535385 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/97a2293b-fc2e-4333-b652-dcda4c009a30-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.535404 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/97a2293b-fc2e-4333-b652-dcda4c009a30-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.539976 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8" (OuterVolumeSpecName: "kube-api-access-8d9m8") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "kube-api-access-8d9m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.540097 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts" (OuterVolumeSpecName: "scripts") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.553568 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.574547 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.582609 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data" (OuterVolumeSpecName: "config-data") pod "97a2293b-fc2e-4333-b652-dcda4c009a30" (UID: "97a2293b-fc2e-4333-b652-dcda4c009a30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.637195 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.637225 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.637237 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.637247 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d9m8\" (UniqueName: \"kubernetes.io/projected/97a2293b-fc2e-4333-b652-dcda4c009a30-kube-api-access-8d9m8\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:43 crc kubenswrapper[4710]: I1009 09:21:43.637255 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/97a2293b-fc2e-4333-b652-dcda4c009a30-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.387896 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9l65" event={"ID":"acb71d13-b228-499f-b4ec-0fc78dfb5663","Type":"ContainerStarted","Data":"e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa"} Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.390597 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"97a2293b-fc2e-4333-b652-dcda4c009a30","Type":"ContainerDied","Data":"88c4449e0688b20cecb8d7d8acf8f2ccb3a387f7940cfc7ed0e3840b82454cfc"} Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.390658 4710 scope.go:117] "RemoveContainer" containerID="c32db9a05dbfb1d77905d678d4e00d6a1e85437a9694f70c226b7d1933f1d1cd" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.390695 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.405760 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-r9l65" podStartSLOduration=2.305473954 podStartE2EDuration="10.405744227s" podCreationTimestamp="2025-10-09 09:21:34 +0000 UTC" firstStartedPulling="2025-10-09 09:21:35.169817957 +0000 UTC m=+1018.659926354" lastFinishedPulling="2025-10-09 09:21:43.270088231 +0000 UTC m=+1026.760196627" observedRunningTime="2025-10-09 09:21:44.402887831 +0000 UTC m=+1027.892996228" watchObservedRunningTime="2025-10-09 09:21:44.405744227 +0000 UTC m=+1027.895852624" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.417846 4710 scope.go:117] "RemoveContainer" containerID="0e5de8ce6910add5431d5c339b711b51d4adef07885ec1dfbc1da72c1cca79f7" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.434237 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.445297 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.461209 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:44 crc kubenswrapper[4710]: E1009 09:21:44.466318 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api-log" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.466453 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api-log" Oct 09 09:21:44 crc kubenswrapper[4710]: E1009 09:21:44.466575 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.466639 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api" Oct 09 09:21:44 crc kubenswrapper[4710]: E1009 09:21:44.466715 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b77a877-c31e-4ede-831f-2dc85c37d957" containerName="mariadb-account-create" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.466761 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b77a877-c31e-4ede-831f-2dc85c37d957" containerName="mariadb-account-create" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.467028 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api-log" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.467097 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b77a877-c31e-4ede-831f-2dc85c37d957" containerName="mariadb-account-create" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.467151 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.468345 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.472670 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.473678 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.473883 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.486364 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.654653 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.654965 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-logs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655016 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655050 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-scripts\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655101 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655131 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655160 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data-custom\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655192 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.655211 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqgvh\" (UniqueName: \"kubernetes.io/projected/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-kube-api-access-bqgvh\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.756859 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.756925 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-scripts\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757002 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757040 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757077 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data-custom\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757112 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757134 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqgvh\" (UniqueName: \"kubernetes.io/projected/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-kube-api-access-bqgvh\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757176 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757233 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-logs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.757859 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-etc-machine-id\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.758260 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-logs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.761290 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data-custom\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.761402 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-scripts\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.762424 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.763008 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.764537 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-config-data\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.768679 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-public-tls-certs\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.773904 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqgvh\" (UniqueName: \"kubernetes.io/projected/cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1-kube-api-access-bqgvh\") pod \"cinder-api-0\" (UID: \"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1\") " pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.783661 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Oct 09 09:21:44 crc kubenswrapper[4710]: I1009 09:21:44.831637 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" path="/var/lib/kubelet/pods/97a2293b-fc2e-4333-b652-dcda4c009a30/volumes" Oct 09 09:21:45 crc kubenswrapper[4710]: I1009 09:21:45.199390 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Oct 09 09:21:45 crc kubenswrapper[4710]: I1009 09:21:45.401861 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1","Type":"ContainerStarted","Data":"17b41ff985dd82a51b7d2f458a019c75292ef442b09685b180586fd6fc95336f"} Oct 09 09:21:46 crc kubenswrapper[4710]: I1009 09:21:46.416329 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1","Type":"ContainerStarted","Data":"021d30eeedd467668ea07373b7f74232eac51d447c68082f0d6971f845de5d8f"} Oct 09 09:21:46 crc kubenswrapper[4710]: I1009 09:21:46.416748 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1","Type":"ContainerStarted","Data":"d14217add1d2a2261119d8a5ae4236fe3fe92eeede30c11646760b501d6b7d58"} Oct 09 09:21:46 crc kubenswrapper[4710]: I1009 09:21:46.416816 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Oct 09 09:21:46 crc kubenswrapper[4710]: I1009 09:21:46.441829 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.441801183 podStartE2EDuration="2.441801183s" podCreationTimestamp="2025-10-09 09:21:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:21:46.433550625 +0000 UTC m=+1029.923659022" watchObservedRunningTime="2025-10-09 09:21:46.441801183 +0000 UTC m=+1029.931909580" Oct 09 09:21:48 crc kubenswrapper[4710]: I1009 09:21:48.341053 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="97a2293b-fc2e-4333-b652-dcda4c009a30" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.0.156:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 09:21:49 crc kubenswrapper[4710]: E1009 09:21:49.995510 4710 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb71d13_b228_499f_b4ec_0fc78dfb5663.slice/crio-conmon-e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb71d13_b228_499f_b4ec_0fc78dfb5663.slice/crio-e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa.scope\": RecentStats: unable to find data in memory cache]" Oct 09 09:21:50 crc kubenswrapper[4710]: I1009 09:21:50.454102 4710 generic.go:334] "Generic (PLEG): container finished" podID="acb71d13-b228-499f-b4ec-0fc78dfb5663" containerID="e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa" exitCode=0 Oct 09 09:21:50 crc kubenswrapper[4710]: I1009 09:21:50.454190 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9l65" 
event={"ID":"acb71d13-b228-499f-b4ec-0fc78dfb5663","Type":"ContainerDied","Data":"e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa"} Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.771500 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.913764 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data\") pod \"acb71d13-b228-499f-b4ec-0fc78dfb5663\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.914604 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts\") pod \"acb71d13-b228-499f-b4ec-0fc78dfb5663\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.914751 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5z7t\" (UniqueName: \"kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t\") pod \"acb71d13-b228-499f-b4ec-0fc78dfb5663\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.914801 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") pod \"acb71d13-b228-499f-b4ec-0fc78dfb5663\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.921669 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t" (OuterVolumeSpecName: "kube-api-access-k5z7t") pod "acb71d13-b228-499f-b4ec-0fc78dfb5663" (UID: "acb71d13-b228-499f-b4ec-0fc78dfb5663"). InnerVolumeSpecName "kube-api-access-k5z7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.922397 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts" (OuterVolumeSpecName: "scripts") pod "acb71d13-b228-499f-b4ec-0fc78dfb5663" (UID: "acb71d13-b228-499f-b4ec-0fc78dfb5663"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:51 crc kubenswrapper[4710]: E1009 09:21:51.938048 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle podName:acb71d13-b228-499f-b4ec-0fc78dfb5663 nodeName:}" failed. No retries permitted until 2025-10-09 09:21:52.438019166 +0000 UTC m=+1035.928127564 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle") pod "acb71d13-b228-499f-b4ec-0fc78dfb5663" (UID: "acb71d13-b228-499f-b4ec-0fc78dfb5663") : error deleting /var/lib/kubelet/pods/acb71d13-b228-499f-b4ec-0fc78dfb5663/volume-subpaths: remove /var/lib/kubelet/pods/acb71d13-b228-499f-b4ec-0fc78dfb5663/volume-subpaths: no such file or directory Oct 09 09:21:51 crc kubenswrapper[4710]: I1009 09:21:51.941390 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data" (OuterVolumeSpecName: "config-data") pod "acb71d13-b228-499f-b4ec-0fc78dfb5663" (UID: "acb71d13-b228-499f-b4ec-0fc78dfb5663"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.019809 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.019845 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5z7t\" (UniqueName: \"kubernetes.io/projected/acb71d13-b228-499f-b4ec-0fc78dfb5663-kube-api-access-k5z7t\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.019857 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.487217 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-r9l65" event={"ID":"acb71d13-b228-499f-b4ec-0fc78dfb5663","Type":"ContainerDied","Data":"b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089"} Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.487269 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b034c6b85b1bb6f33d9d2ee838027aa27645991abb70865489fc76c666949089" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.487410 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-r9l65" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.528709 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") pod \"acb71d13-b228-499f-b4ec-0fc78dfb5663\" (UID: \"acb71d13-b228-499f-b4ec-0fc78dfb5663\") " Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.548036 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acb71d13-b228-499f-b4ec-0fc78dfb5663" (UID: "acb71d13-b228-499f-b4ec-0fc78dfb5663"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.570560 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 09:21:52 crc kubenswrapper[4710]: E1009 09:21:52.571050 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb71d13-b228-499f-b4ec-0fc78dfb5663" containerName="nova-cell0-conductor-db-sync" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.571072 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb71d13-b228-499f-b4ec-0fc78dfb5663" containerName="nova-cell0-conductor-db-sync" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.571315 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb71d13-b228-499f-b4ec-0fc78dfb5663" containerName="nova-cell0-conductor-db-sync" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.572034 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.578723 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.630757 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.630950 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.631091 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv8ct\" (UniqueName: \"kubernetes.io/projected/79cbbde5-3252-4efd-a000-95ec002a56bb-kube-api-access-fv8ct\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.631655 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb71d13-b228-499f-b4ec-0fc78dfb5663-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.734542 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.734659 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.734733 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-fv8ct\" (UniqueName: \"kubernetes.io/projected/79cbbde5-3252-4efd-a000-95ec002a56bb-kube-api-access-fv8ct\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.740605 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.745042 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79cbbde5-3252-4efd-a000-95ec002a56bb-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.748914 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv8ct\" (UniqueName: \"kubernetes.io/projected/79cbbde5-3252-4efd-a000-95ec002a56bb-kube-api-access-fv8ct\") pod \"nova-cell0-conductor-0\" (UID: \"79cbbde5-3252-4efd-a000-95ec002a56bb\") " pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:52 crc kubenswrapper[4710]: I1009 09:21:52.898776 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:53 crc kubenswrapper[4710]: I1009 09:21:53.321907 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 09 09:21:53 crc kubenswrapper[4710]: I1009 09:21:53.513718 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"79cbbde5-3252-4efd-a000-95ec002a56bb","Type":"ContainerStarted","Data":"5859844b9dd89f42453bf6a13d07c475fefdcbab223661d099b86861566fa940"} Oct 09 09:21:53 crc kubenswrapper[4710]: I1009 09:21:53.513802 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"79cbbde5-3252-4efd-a000-95ec002a56bb","Type":"ContainerStarted","Data":"fa9e1d642e9d0d293da72f9798dd9644d9db213ccda5e77edef0ed233d5a598e"} Oct 09 09:21:53 crc kubenswrapper[4710]: I1009 09:21:53.514512 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 09 09:21:53 crc kubenswrapper[4710]: I1009 09:21:53.533494 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.533474489 podStartE2EDuration="1.533474489s" podCreationTimestamp="2025-10-09 09:21:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:21:53.531354872 +0000 UTC m=+1037.021463269" watchObservedRunningTime="2025-10-09 09:21:53.533474489 +0000 UTC m=+1037.023582886" Oct 09 09:21:56 crc kubenswrapper[4710]: I1009 09:21:56.413165 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 09 09:22:02 crc kubenswrapper[4710]: I1009 09:22:02.920788 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.339347 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-cell-mapping-nvq24"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.341073 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.348800 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.349178 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.352559 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-nvq24"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.439134 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.439295 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.439356 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pnf2\" (UniqueName: \"kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.439546 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.536647 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.538295 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.541772 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.541907 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.541950 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pnf2\" (UniqueName: \"kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.542070 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.548238 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.554066 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.554909 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.570298 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.579179 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.588950 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pnf2\" (UniqueName: \"kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2\") pod \"nova-cell0-cell-mapping-nvq24\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.632673 4710 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.634309 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.642761 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.644540 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qqp4\" (UniqueName: \"kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.644596 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.644833 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.659463 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.660990 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.662111 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.678830 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.719626 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.744590 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.747785 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.747867 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.747920 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qqp4\" (UniqueName: \"kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.747952 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dns99\" (UniqueName: \"kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.747984 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.748006 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.768578 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.769011 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.775888 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qqp4\" (UniqueName: \"kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4\") pod \"nova-cell1-novncproxy-0\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.795594 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.799106 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.817946 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.819379 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851301 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851482 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851592 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dns99\" (UniqueName: \"kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851674 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851745 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvflt\" (UniqueName: \"kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851824 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.851952 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.861883 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.861963 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.862976 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.863812 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.884483 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.892914 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dns99\" (UniqueName: \"kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99\") pod \"nova-scheduler-0\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.936004 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.953394 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.954923 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955054 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g67fh\" (UniqueName: \"kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955629 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955726 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955792 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvflt\" (UniqueName: \"kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955825 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.955976 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956000 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956072 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956113 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956132 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr55m\" (UniqueName: \"kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956246 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.956286 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.957469 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.963206 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.970575 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:03 crc kubenswrapper[4710]: I1009 09:22:03.980518 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvflt\" (UniqueName: \"kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt\") pod \"nova-metadata-0\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " pod="openstack/nova-metadata-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.004723 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.058909 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.059135 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.059206 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060127 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060133 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060174 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g67fh\" (UniqueName: \"kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060327 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060495 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060513 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060576 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.060597 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr55m\" (UniqueName: \"kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.061068 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.061611 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.068515 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.069242 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.071497 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.074279 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr55m\" (UniqueName: \"kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m\") pod \"dnsmasq-dns-75fb48c489-59xpd\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.079728 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g67fh\" (UniqueName: \"kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh\") pod \"nova-api-0\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.132804 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.185772 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.304584 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-nvq24"] Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.442315 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:04 crc kubenswrapper[4710]: W1009 09:22:04.589654 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf7f9a021_79c5_43c1_8437_593690db947c.slice/crio-4651f26f06d5860fffdab9611eb8adc2ae5ff39bbd1d77df5372da4db771cef4 WatchSource:0}: Error finding container 4651f26f06d5860fffdab9611eb8adc2ae5ff39bbd1d77df5372da4db771cef4: Status 404 returned error can't find the container with id 4651f26f06d5860fffdab9611eb8adc2ae5ff39bbd1d77df5372da4db771cef4 Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.593564 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-74mpr"] Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.595242 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.597268 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.599132 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.619905 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.651329 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nvq24" event={"ID":"e80dad3d-99a6-42a9-8d55-c54f02dee2bd","Type":"ContainerStarted","Data":"8034ea960debf427021913fb5dc3aeae0f584145bb5af61c5407a2d175fb0abd"} Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.651397 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nvq24" event={"ID":"e80dad3d-99a6-42a9-8d55-c54f02dee2bd","Type":"ContainerStarted","Data":"4baeceb6f2c9809b6de06da236406b5b9487159465eb4da517b0997a9d91d0ea"} Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.654118 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-74mpr"] Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.665331 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7f9a021-79c5-43c1-8437-593690db947c","Type":"ContainerStarted","Data":"4651f26f06d5860fffdab9611eb8adc2ae5ff39bbd1d77df5372da4db771cef4"} Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.667060 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e5a3792-1713-4d38-8bf3-ee149ed43e7d","Type":"ContainerStarted","Data":"2864cb4e26c5afb0ed5beacf4aa86e8882565a35adff956d2b52ad6ef879f5ff"} Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.685005 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: 
\"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.685046 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.685171 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.685251 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l96n6\" (UniqueName: \"kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.708314 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.728640 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-nvq24" podStartSLOduration=1.728627206 podStartE2EDuration="1.728627206s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:04.671566005 +0000 UTC m=+1048.161674401" watchObservedRunningTime="2025-10-09 09:22:04.728627206 +0000 UTC m=+1048.218735603" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.751509 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:04 crc kubenswrapper[4710]: W1009 09:22:04.758108 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c6c2b3e_1323_4927_a3c7_bcd0dce561f6.slice/crio-30b4deeb76dca68cad8b4dcaa6ed00a0913af29e3307e80cc3bfc4b017ab4449 WatchSource:0}: Error finding container 30b4deeb76dca68cad8b4dcaa6ed00a0913af29e3307e80cc3bfc4b017ab4449: Status 404 returned error can't find the container with id 30b4deeb76dca68cad8b4dcaa6ed00a0913af29e3307e80cc3bfc4b017ab4449 Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.787644 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.787786 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l96n6\" (UniqueName: \"kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " 
pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.787876 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.787899 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.799394 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.799986 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.807081 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.810298 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l96n6\" (UniqueName: \"kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6\") pod \"nova-cell1-conductor-db-sync-74mpr\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.855769 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:04 crc kubenswrapper[4710]: W1009 09:22:04.860418 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5fb6ddf_08c7_4cd5_acaf_338e55a82730.slice/crio-b9b982255ede11a6ac6df5ad95d7258a48cc6166225a5af65c40d4ef876c424a WatchSource:0}: Error finding container b9b982255ede11a6ac6df5ad95d7258a48cc6166225a5af65c40d4ef876c424a: Status 404 returned error can't find the container with id b9b982255ede11a6ac6df5ad95d7258a48cc6166225a5af65c40d4ef876c424a Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.969025 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Oct 09 09:22:04 crc kubenswrapper[4710]: I1009 09:22:04.977354 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.414484 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-74mpr"] Oct 09 09:22:05 crc kubenswrapper[4710]: W1009 09:22:05.425588 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb890adc9_4438_4ec2_b8a9_ef4c010630a2.slice/crio-561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96 WatchSource:0}: Error finding container 561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96: Status 404 returned error can't find the container with id 561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96 Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.679364 4710 generic.go:334] "Generic (PLEG): container finished" podID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerID="0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626" exitCode=0 Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.679670 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" event={"ID":"f5fb6ddf-08c7-4cd5-acaf-338e55a82730","Type":"ContainerDied","Data":"0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.679698 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" event={"ID":"f5fb6ddf-08c7-4cd5-acaf-338e55a82730","Type":"ContainerStarted","Data":"b9b982255ede11a6ac6df5ad95d7258a48cc6166225a5af65c40d4ef876c424a"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.683957 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerStarted","Data":"30b4deeb76dca68cad8b4dcaa6ed00a0913af29e3307e80cc3bfc4b017ab4449"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.691884 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerStarted","Data":"76707635288b6e85c64bb0516fe2d569994f3e1e7fbb3c493a2e846eb4c062c4"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.694195 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-74mpr" event={"ID":"b890adc9-4438-4ec2-b8a9-ef4c010630a2","Type":"ContainerStarted","Data":"4e8cfac6d19526cb2dcc1273b769b08eb964fea262a3ce0f2d15f2026d9c0efd"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.694236 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-74mpr" event={"ID":"b890adc9-4438-4ec2-b8a9-ef4c010630a2","Type":"ContainerStarted","Data":"561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96"} Oct 09 09:22:05 crc kubenswrapper[4710]: I1009 09:22:05.754405 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-74mpr" podStartSLOduration=1.754385476 podStartE2EDuration="1.754385476s" podCreationTimestamp="2025-10-09 09:22:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:05.746962578 +0000 UTC m=+1049.237070975" watchObservedRunningTime="2025-10-09 09:22:05.754385476 +0000 UTC m=+1049.244493893" Oct 09 09:22:06 crc kubenswrapper[4710]: I1009 09:22:06.717664 4710 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" event={"ID":"f5fb6ddf-08c7-4cd5-acaf-338e55a82730","Type":"ContainerStarted","Data":"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f"} Oct 09 09:22:06 crc kubenswrapper[4710]: I1009 09:22:06.718100 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:06 crc kubenswrapper[4710]: I1009 09:22:06.746455 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" podStartSLOduration=3.746443245 podStartE2EDuration="3.746443245s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:06.735374352 +0000 UTC m=+1050.225482750" watchObservedRunningTime="2025-10-09 09:22:06.746443245 +0000 UTC m=+1050.236551642" Oct 09 09:22:07 crc kubenswrapper[4710]: I1009 09:22:07.645909 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:07 crc kubenswrapper[4710]: I1009 09:22:07.662297 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.739067 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7f9a021-79c5-43c1-8437-593690db947c","Type":"ContainerStarted","Data":"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957"} Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.741200 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e5a3792-1713-4d38-8bf3-ee149ed43e7d","Type":"ContainerStarted","Data":"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554"} Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.741370 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554" gracePeriod=30 Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.762286 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.5571588 podStartE2EDuration="5.7622681s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="2025-10-09 09:22:04.596526971 +0000 UTC m=+1048.086635368" lastFinishedPulling="2025-10-09 09:22:07.80163628 +0000 UTC m=+1051.291744668" observedRunningTime="2025-10-09 09:22:08.756805268 +0000 UTC m=+1052.246913665" watchObservedRunningTime="2025-10-09 09:22:08.7622681 +0000 UTC m=+1052.252376487" Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.936661 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:08 crc kubenswrapper[4710]: I1009 09:22:08.953943 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 09:22:09 crc kubenswrapper[4710]: I1009 09:22:09.755778 4710 generic.go:334] "Generic (PLEG): container finished" podID="72717529-ae4b-4785-bf30-18d13068cddd" containerID="241e33c60eba537e14168dc8a5e77ace8fc897fd729371f52ab6686ca82a6292" exitCode=137 Oct 09 09:22:09 crc kubenswrapper[4710]: I1009 
09:22:09.755968 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerDied","Data":"241e33c60eba537e14168dc8a5e77ace8fc897fd729371f52ab6686ca82a6292"} Oct 09 09:22:09 crc kubenswrapper[4710]: I1009 09:22:09.757439 4710 generic.go:334] "Generic (PLEG): container finished" podID="b890adc9-4438-4ec2-b8a9-ef4c010630a2" containerID="4e8cfac6d19526cb2dcc1273b769b08eb964fea262a3ce0f2d15f2026d9c0efd" exitCode=0 Oct 09 09:22:09 crc kubenswrapper[4710]: I1009 09:22:09.757467 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-74mpr" event={"ID":"b890adc9-4438-4ec2-b8a9-ef4c010630a2","Type":"ContainerDied","Data":"4e8cfac6d19526cb2dcc1273b769b08eb964fea262a3ce0f2d15f2026d9c0efd"} Oct 09 09:22:09 crc kubenswrapper[4710]: I1009 09:22:09.775003 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.421660213 podStartE2EDuration="6.774990607s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="2025-10-09 09:22:04.450573603 +0000 UTC m=+1047.940682000" lastFinishedPulling="2025-10-09 09:22:07.803903997 +0000 UTC m=+1051.294012394" observedRunningTime="2025-10-09 09:22:08.779962206 +0000 UTC m=+1052.270070603" watchObservedRunningTime="2025-10-09 09:22:09.774990607 +0000 UTC m=+1053.265099005" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.084125 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.115685 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116017 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116048 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116081 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116142 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-688j8\" (UniqueName: \"kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116165 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.116201 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd\") pod \"72717529-ae4b-4785-bf30-18d13068cddd\" (UID: \"72717529-ae4b-4785-bf30-18d13068cddd\") " Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.117147 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.117774 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.120955 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts" (OuterVolumeSpecName: "scripts") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.122988 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8" (OuterVolumeSpecName: "kube-api-access-688j8") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "kube-api-access-688j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.205713 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.221214 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.221454 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.221550 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-688j8\" (UniqueName: \"kubernetes.io/projected/72717529-ae4b-4785-bf30-18d13068cddd-kube-api-access-688j8\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.221702 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.221796 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/72717529-ae4b-4785-bf30-18d13068cddd-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.247743 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data" (OuterVolumeSpecName: "config-data") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.268129 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72717529-ae4b-4785-bf30-18d13068cddd" (UID: "72717529-ae4b-4785-bf30-18d13068cddd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.327463 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.327652 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72717529-ae4b-4785-bf30-18d13068cddd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.767537 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.768385 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"72717529-ae4b-4785-bf30-18d13068cddd","Type":"ContainerDied","Data":"30a09d9e512b4b1100747f64ba7ff883509cad7067937a6bb895a38f321cce44"} Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.768539 4710 scope.go:117] "RemoveContainer" containerID="241e33c60eba537e14168dc8a5e77ace8fc897fd729371f52ab6686ca82a6292" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.773330 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerStarted","Data":"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4"} Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.773408 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerStarted","Data":"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f"} Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.781607 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerStarted","Data":"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e"} Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.781686 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerStarted","Data":"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f"} Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.781926 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-log" containerID="cri-o://3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" gracePeriod=30 Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.782091 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-metadata" containerID="cri-o://718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" gracePeriod=30 Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.795374 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.766777684 podStartE2EDuration="7.795359874s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="2025-10-09 09:22:04.774985085 +0000 UTC m=+1048.265093472" lastFinishedPulling="2025-10-09 09:22:09.803567265 +0000 UTC m=+1053.293675662" observedRunningTime="2025-10-09 09:22:10.794182124 +0000 UTC m=+1054.284290521" watchObservedRunningTime="2025-10-09 09:22:10.795359874 +0000 UTC m=+1054.285468271" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.813840 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.74667673 podStartE2EDuration="7.813819194s" podCreationTimestamp="2025-10-09 09:22:03 +0000 UTC" firstStartedPulling="2025-10-09 09:22:04.730529955 +0000 UTC m=+1048.220638352" lastFinishedPulling="2025-10-09 09:22:09.797672419 +0000 UTC m=+1053.287780816" observedRunningTime="2025-10-09 09:22:10.811861752 +0000 UTC 
m=+1054.301970150" watchObservedRunningTime="2025-10-09 09:22:10.813819194 +0000 UTC m=+1054.303927591" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.848336 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.868669 4710 scope.go:117] "RemoveContainer" containerID="dd690eef57295d92a44f1b4ce81eca9d24d7b767a5ec639c25e8647811aad840" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.874733 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.890393 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:10 crc kubenswrapper[4710]: E1009 09:22:10.890968 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-notification-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.890994 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-notification-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: E1009 09:22:10.891008 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="sg-core" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891013 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="sg-core" Oct 09 09:22:10 crc kubenswrapper[4710]: E1009 09:22:10.891026 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="proxy-httpd" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891031 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="proxy-httpd" Oct 09 09:22:10 crc kubenswrapper[4710]: E1009 09:22:10.891042 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-central-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891048 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-central-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891266 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="proxy-httpd" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891291 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-central-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891299 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="ceilometer-notification-agent" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.891309 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="72717529-ae4b-4785-bf30-18d13068cddd" containerName="sg-core" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.893180 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.899419 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.899685 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.909021 4710 scope.go:117] "RemoveContainer" containerID="bad3c1788d92fad48d5cc7148074f32c85f8727f26176e1a6a0e7e3a547f5d2a" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.911163 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936141 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936230 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936257 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936274 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936309 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936398 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkq22\" (UniqueName: \"kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.936422 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:10 crc kubenswrapper[4710]: I1009 09:22:10.970913 4710 scope.go:117] "RemoveContainer" containerID="675a500a8ea6b2b0fdce0a7ccd41685403b56bad5780cc7bf330dc770969b2fd" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 
09:22:11.038569 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038632 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038671 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038697 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038755 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkq22\" (UniqueName: \"kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038780 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.038883 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.039129 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.039779 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.045045 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.046890 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.048722 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.055002 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkq22\" (UniqueName: \"kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.056276 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts\") pod \"ceilometer-0\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.173182 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.243232 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l96n6\" (UniqueName: \"kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6\") pod \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.243317 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data\") pod \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.243988 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts\") pod \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.244091 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle\") pod \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\" (UID: \"b890adc9-4438-4ec2-b8a9-ef4c010630a2\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.246609 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6" (OuterVolumeSpecName: "kube-api-access-l96n6") pod "b890adc9-4438-4ec2-b8a9-ef4c010630a2" (UID: "b890adc9-4438-4ec2-b8a9-ef4c010630a2"). InnerVolumeSpecName "kube-api-access-l96n6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.247815 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts" (OuterVolumeSpecName: "scripts") pod "b890adc9-4438-4ec2-b8a9-ef4c010630a2" (UID: "b890adc9-4438-4ec2-b8a9-ef4c010630a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.257653 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.275502 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b890adc9-4438-4ec2-b8a9-ef4c010630a2" (UID: "b890adc9-4438-4ec2-b8a9-ef4c010630a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.276273 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data" (OuterVolumeSpecName: "config-data") pod "b890adc9-4438-4ec2-b8a9-ef4c010630a2" (UID: "b890adc9-4438-4ec2-b8a9-ef4c010630a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.345776 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.345798 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l96n6\" (UniqueName: \"kubernetes.io/projected/b890adc9-4438-4ec2-b8a9-ef4c010630a2-kube-api-access-l96n6\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.345813 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.345822 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b890adc9-4438-4ec2-b8a9-ef4c010630a2-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.375651 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.446710 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvflt\" (UniqueName: \"kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt\") pod \"64b95ec3-a607-4b8f-8395-465c2af78e4f\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.446849 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs\") pod \"64b95ec3-a607-4b8f-8395-465c2af78e4f\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.446892 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle\") pod \"64b95ec3-a607-4b8f-8395-465c2af78e4f\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.446928 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data\") pod \"64b95ec3-a607-4b8f-8395-465c2af78e4f\" (UID: \"64b95ec3-a607-4b8f-8395-465c2af78e4f\") " Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.447994 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs" (OuterVolumeSpecName: "logs") pod "64b95ec3-a607-4b8f-8395-465c2af78e4f" (UID: "64b95ec3-a607-4b8f-8395-465c2af78e4f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.456916 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt" (OuterVolumeSpecName: "kube-api-access-kvflt") pod "64b95ec3-a607-4b8f-8395-465c2af78e4f" (UID: "64b95ec3-a607-4b8f-8395-465c2af78e4f"). InnerVolumeSpecName "kube-api-access-kvflt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.475103 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64b95ec3-a607-4b8f-8395-465c2af78e4f" (UID: "64b95ec3-a607-4b8f-8395-465c2af78e4f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.476387 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data" (OuterVolumeSpecName: "config-data") pod "64b95ec3-a607-4b8f-8395-465c2af78e4f" (UID: "64b95ec3-a607-4b8f-8395-465c2af78e4f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.549197 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvflt\" (UniqueName: \"kubernetes.io/projected/64b95ec3-a607-4b8f-8395-465c2af78e4f-kube-api-access-kvflt\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.549224 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b95ec3-a607-4b8f-8395-465c2af78e4f-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.549236 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.549244 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b95ec3-a607-4b8f-8395-465c2af78e4f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.701307 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.790026 4710 generic.go:334] "Generic (PLEG): container finished" podID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerID="718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" exitCode=0 Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.790301 4710 generic.go:334] "Generic (PLEG): container finished" podID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerID="3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" exitCode=143 Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.790152 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.790176 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerDied","Data":"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e"} Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.791371 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerDied","Data":"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f"} Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.791401 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"64b95ec3-a607-4b8f-8395-465c2af78e4f","Type":"ContainerDied","Data":"76707635288b6e85c64bb0516fe2d569994f3e1e7fbb3c493a2e846eb4c062c4"} Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.791420 4710 scope.go:117] "RemoveContainer" containerID="718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.793872 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-74mpr" event={"ID":"b890adc9-4438-4ec2-b8a9-ef4c010630a2","Type":"ContainerDied","Data":"561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96"} Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.793904 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="561f6ef2af96e9fbdd6518cf8839bc2b73eb25d079bf73a1f8b4015202a6da96" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.793958 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-74mpr" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.795993 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerStarted","Data":"8273678c1e002d63f66f907c246168823901a228de5a39ec3fa87d5f302afec6"} Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.842902 4710 scope.go:117] "RemoveContainer" containerID="3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864275 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: E1009 09:22:11.864712 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-log" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864730 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-log" Oct 09 09:22:11 crc kubenswrapper[4710]: E1009 09:22:11.864746 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-metadata" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864753 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-metadata" Oct 09 09:22:11 crc kubenswrapper[4710]: E1009 09:22:11.864785 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b890adc9-4438-4ec2-b8a9-ef4c010630a2" containerName="nova-cell1-conductor-db-sync" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864792 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b890adc9-4438-4ec2-b8a9-ef4c010630a2" containerName="nova-cell1-conductor-db-sync" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864971 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-metadata" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.864996 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" containerName="nova-metadata-log" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.865010 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b890adc9-4438-4ec2-b8a9-ef4c010630a2" containerName="nova-cell1-conductor-db-sync" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.865402 4710 scope.go:117] "RemoveContainer" containerID="718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" Oct 09 09:22:11 crc kubenswrapper[4710]: E1009 09:22:11.865734 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e\": container with ID starting with 718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e not found: ID does not exist" containerID="718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.865778 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e"} err="failed to get container status 
\"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e\": rpc error: code = NotFound desc = could not find container \"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e\": container with ID starting with 718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e not found: ID does not exist" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.865806 4710 scope.go:117] "RemoveContainer" containerID="3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.865817 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:11 crc kubenswrapper[4710]: E1009 09:22:11.866180 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f\": container with ID starting with 3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f not found: ID does not exist" containerID="3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.866208 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f"} err="failed to get container status \"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f\": rpc error: code = NotFound desc = could not find container \"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f\": container with ID starting with 3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f not found: ID does not exist" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.866225 4710 scope.go:117] "RemoveContainer" containerID="718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.866496 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e"} err="failed to get container status \"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e\": rpc error: code = NotFound desc = could not find container \"718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e\": container with ID starting with 718f48c62c16bbeab86f7da90755d1a67cd4889627762ca643dd15e2b091943e not found: ID does not exist" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.866520 4710 scope.go:117] "RemoveContainer" containerID="3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.867040 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f"} err="failed to get container status \"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f\": rpc error: code = NotFound desc = could not find container \"3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f\": container with ID starting with 3f16af5eb58ba1382c507e6e5399cd1262f645b8ad84d0ba96186deeeb2a6d9f not found: ID does not exist" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.867553 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.891662 4710 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.902484 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.913537 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.943172 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.944740 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.946193 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.946728 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.948986 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.955418 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.955536 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sk2c\" (UniqueName: \"kubernetes.io/projected/d7541f91-8a95-4a0e-9cdd-95252f38710b-kube-api-access-5sk2c\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:11 crc kubenswrapper[4710]: I1009 09:22:11.955620 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.056855 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.056912 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.056973 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc 
kubenswrapper[4710]: I1009 09:22:12.057013 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn57z\" (UniqueName: \"kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.057037 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sk2c\" (UniqueName: \"kubernetes.io/projected/d7541f91-8a95-4a0e-9cdd-95252f38710b-kube-api-access-5sk2c\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.057052 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.057126 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.057157 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.062686 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.062985 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7541f91-8a95-4a0e-9cdd-95252f38710b-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.071338 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sk2c\" (UniqueName: \"kubernetes.io/projected/d7541f91-8a95-4a0e-9cdd-95252f38710b-kube-api-access-5sk2c\") pod \"nova-cell1-conductor-0\" (UID: \"d7541f91-8a95-4a0e-9cdd-95252f38710b\") " pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.158700 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.158880 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.158990 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.159099 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn57z\" (UniqueName: \"kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.159167 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.159522 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.162629 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.162877 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.163006 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.172791 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn57z\" (UniqueName: \"kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z\") pod \"nova-metadata-0\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.183709 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.288051 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.579896 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Oct 09 09:22:12 crc kubenswrapper[4710]: W1009 09:22:12.581809 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7541f91_8a95_4a0e_9cdd_95252f38710b.slice/crio-98301dd1c8649cfeed9fa2ac9edcae1e14383f7fe2d64d5cca0c4f980a1e5576 WatchSource:0}: Error finding container 98301dd1c8649cfeed9fa2ac9edcae1e14383f7fe2d64d5cca0c4f980a1e5576: Status 404 returned error can't find the container with id 98301dd1c8649cfeed9fa2ac9edcae1e14383f7fe2d64d5cca0c4f980a1e5576 Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.699511 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.811591 4710 generic.go:334] "Generic (PLEG): container finished" podID="e80dad3d-99a6-42a9-8d55-c54f02dee2bd" containerID="8034ea960debf427021913fb5dc3aeae0f584145bb5af61c5407a2d175fb0abd" exitCode=0 Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.811700 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nvq24" event={"ID":"e80dad3d-99a6-42a9-8d55-c54f02dee2bd","Type":"ContainerDied","Data":"8034ea960debf427021913fb5dc3aeae0f584145bb5af61c5407a2d175fb0abd"} Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.813819 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d7541f91-8a95-4a0e-9cdd-95252f38710b","Type":"ContainerStarted","Data":"89cb451325544e9cfb59400e3082e3f617caa8974ab175f8346eeb488493e434"} Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.813861 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"d7541f91-8a95-4a0e-9cdd-95252f38710b","Type":"ContainerStarted","Data":"98301dd1c8649cfeed9fa2ac9edcae1e14383f7fe2d64d5cca0c4f980a1e5576"} Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.824970 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64b95ec3-a607-4b8f-8395-465c2af78e4f" path="/var/lib/kubelet/pods/64b95ec3-a607-4b8f-8395-465c2af78e4f/volumes" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.825627 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72717529-ae4b-4785-bf30-18d13068cddd" path="/var/lib/kubelet/pods/72717529-ae4b-4785-bf30-18d13068cddd/volumes" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.826542 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.826582 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerStarted","Data":"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb"} Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.826598 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerStarted","Data":"ee007cce8278101c0ae23ac3b6af3620f58db1b55c502c5e4dd5d812961a0756"} Oct 09 09:22:12 crc kubenswrapper[4710]: I1009 09:22:12.853998 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" 
podStartSLOduration=1.8539765620000002 podStartE2EDuration="1.853976562s" podCreationTimestamp="2025-10-09 09:22:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:12.847893241 +0000 UTC m=+1056.338001637" watchObservedRunningTime="2025-10-09 09:22:12.853976562 +0000 UTC m=+1056.344084960" Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.832778 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerStarted","Data":"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f"} Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.836682 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerStarted","Data":"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b"} Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.836757 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerStarted","Data":"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0"} Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.862765 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8627411670000003 podStartE2EDuration="2.862741167s" podCreationTimestamp="2025-10-09 09:22:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:13.855063819 +0000 UTC m=+1057.345172216" watchObservedRunningTime="2025-10-09 09:22:13.862741167 +0000 UTC m=+1057.352849563" Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.955482 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 09:22:13 crc kubenswrapper[4710]: I1009 09:22:13.981887 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.133929 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.133978 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.198672 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.255669 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.261331 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.261571 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="dnsmasq-dns" containerID="cri-o://722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4" gracePeriod=10 Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.317350 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pnf2\" (UniqueName: \"kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2\") pod \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.317708 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts\") pod \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.317843 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle\") pod \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.317871 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data\") pod \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\" (UID: \"e80dad3d-99a6-42a9-8d55-c54f02dee2bd\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.326473 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts" (OuterVolumeSpecName: "scripts") pod "e80dad3d-99a6-42a9-8d55-c54f02dee2bd" (UID: "e80dad3d-99a6-42a9-8d55-c54f02dee2bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.349736 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2" (OuterVolumeSpecName: "kube-api-access-9pnf2") pod "e80dad3d-99a6-42a9-8d55-c54f02dee2bd" (UID: "e80dad3d-99a6-42a9-8d55-c54f02dee2bd"). InnerVolumeSpecName "kube-api-access-9pnf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.390140 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data" (OuterVolumeSpecName: "config-data") pod "e80dad3d-99a6-42a9-8d55-c54f02dee2bd" (UID: "e80dad3d-99a6-42a9-8d55-c54f02dee2bd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.403642 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e80dad3d-99a6-42a9-8d55-c54f02dee2bd" (UID: "e80dad3d-99a6-42a9-8d55-c54f02dee2bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.420511 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.420538 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.420551 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pnf2\" (UniqueName: \"kubernetes.io/projected/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-kube-api-access-9pnf2\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.420574 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e80dad3d-99a6-42a9-8d55-c54f02dee2bd-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.713730 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.828962 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc\") pod \"f6159738-132a-43a9-a072-4925e12092b1\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.829300 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jdfx\" (UniqueName: \"kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx\") pod \"f6159738-132a-43a9-a072-4925e12092b1\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.829332 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb\") pod \"f6159738-132a-43a9-a072-4925e12092b1\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.829478 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb\") pod \"f6159738-132a-43a9-a072-4925e12092b1\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.830239 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config\") pod \"f6159738-132a-43a9-a072-4925e12092b1\" (UID: \"f6159738-132a-43a9-a072-4925e12092b1\") " Oct 09 09:22:14 crc kubenswrapper[4710]: 
I1009 09:22:14.860473 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx" (OuterVolumeSpecName: "kube-api-access-9jdfx") pod "f6159738-132a-43a9-a072-4925e12092b1" (UID: "f6159738-132a-43a9-a072-4925e12092b1"). InnerVolumeSpecName "kube-api-access-9jdfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.881985 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-nvq24" event={"ID":"e80dad3d-99a6-42a9-8d55-c54f02dee2bd","Type":"ContainerDied","Data":"4baeceb6f2c9809b6de06da236406b5b9487159465eb4da517b0997a9d91d0ea"} Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.882037 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4baeceb6f2c9809b6de06da236406b5b9487159465eb4da517b0997a9d91d0ea" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.882186 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-nvq24" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.886370 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f6159738-132a-43a9-a072-4925e12092b1" (UID: "f6159738-132a-43a9-a072-4925e12092b1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.901034 4710 generic.go:334] "Generic (PLEG): container finished" podID="f6159738-132a-43a9-a072-4925e12092b1" containerID="722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4" exitCode=0 Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.901146 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.901161 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" event={"ID":"f6159738-132a-43a9-a072-4925e12092b1","Type":"ContainerDied","Data":"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4"} Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.901196 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdc9d6cdc-lv29t" event={"ID":"f6159738-132a-43a9-a072-4925e12092b1","Type":"ContainerDied","Data":"700ace9e83b873378e72b2aea8136eb48850f72a18fc8950546e09b0e6f9b2b5"} Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.901214 4710 scope.go:117] "RemoveContainer" containerID="722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.917069 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerStarted","Data":"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340"} Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.932990 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.933016 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jdfx\" (UniqueName: \"kubernetes.io/projected/f6159738-132a-43a9-a072-4925e12092b1-kube-api-access-9jdfx\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.945210 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f6159738-132a-43a9-a072-4925e12092b1" (UID: "f6159738-132a-43a9-a072-4925e12092b1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.953852 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config" (OuterVolumeSpecName: "config") pod "f6159738-132a-43a9-a072-4925e12092b1" (UID: "f6159738-132a-43a9-a072-4925e12092b1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.975114 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.975377 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-log" containerID="cri-o://f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f" gracePeriod=30 Oct 09 09:22:14 crc kubenswrapper[4710]: I1009 09:22:14.975922 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-api" containerID="cri-o://4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4" gracePeriod=30 Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.024590 4710 scope.go:117] "RemoveContainer" containerID="2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.024709 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": EOF" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.024808 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.025186 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f6159738-132a-43a9-a072-4925e12092b1" (UID: "f6159738-132a-43a9-a072-4925e12092b1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.025529 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.025805 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.173:8774/\": EOF" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.034942 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.034966 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.034975 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f6159738-132a-43a9-a072-4925e12092b1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.048168 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.056654 4710 scope.go:117] "RemoveContainer" containerID="722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4" Oct 09 09:22:15 crc kubenswrapper[4710]: E1009 09:22:15.060145 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4\": container with ID starting with 722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4 not found: ID does not exist" containerID="722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.060191 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4"} err="failed to get container status \"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4\": rpc error: code = NotFound desc = could not find container \"722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4\": container with ID starting with 722a359f11280231152133e32107272fbe684d6adcf7fa0da9f39e9cd5bb0cd4 not found: ID does not exist" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.060219 4710 scope.go:117] "RemoveContainer" containerID="2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe" Oct 09 09:22:15 crc kubenswrapper[4710]: E1009 09:22:15.060803 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe\": container with ID starting with 2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe not found: ID does not exist" containerID="2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.060832 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe"} 
err="failed to get container status \"2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe\": rpc error: code = NotFound desc = could not find container \"2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe\": container with ID starting with 2191a6bdf8db1147f45d40e382bd918b598f30ff654e2671995af4f8fd6dabbe not found: ID does not exist" Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.232599 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.240721 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bdc9d6cdc-lv29t"] Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.929368 4710 generic.go:334] "Generic (PLEG): container finished" podID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerID="f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f" exitCode=143 Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.929462 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerDied","Data":"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f"} Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.931352 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-log" containerID="cri-o://790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" gracePeriod=30 Oct 09 09:22:15 crc kubenswrapper[4710]: I1009 09:22:15.933916 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-metadata" containerID="cri-o://df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" gracePeriod=30 Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.401603 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461642 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs\") pod \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461676 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn57z\" (UniqueName: \"kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z\") pod \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461758 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs\") pod \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461810 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle\") pod \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461859 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data\") pod \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\" (UID: \"ae0c36e0-42d6-49e8-b9a4-bc41cc826169\") " Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.461943 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs" (OuterVolumeSpecName: "logs") pod "ae0c36e0-42d6-49e8-b9a4-bc41cc826169" (UID: "ae0c36e0-42d6-49e8-b9a4-bc41cc826169"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.462112 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.466094 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z" (OuterVolumeSpecName: "kube-api-access-sn57z") pod "ae0c36e0-42d6-49e8-b9a4-bc41cc826169" (UID: "ae0c36e0-42d6-49e8-b9a4-bc41cc826169"). InnerVolumeSpecName "kube-api-access-sn57z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.482508 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data" (OuterVolumeSpecName: "config-data") pod "ae0c36e0-42d6-49e8-b9a4-bc41cc826169" (UID: "ae0c36e0-42d6-49e8-b9a4-bc41cc826169"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.486026 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae0c36e0-42d6-49e8-b9a4-bc41cc826169" (UID: "ae0c36e0-42d6-49e8-b9a4-bc41cc826169"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.499582 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ae0c36e0-42d6-49e8-b9a4-bc41cc826169" (UID: "ae0c36e0-42d6-49e8-b9a4-bc41cc826169"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.562994 4710 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.563020 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.563030 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.563040 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn57z\" (UniqueName: \"kubernetes.io/projected/ae0c36e0-42d6-49e8-b9a4-bc41cc826169-kube-api-access-sn57z\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.827486 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6159738-132a-43a9-a072-4925e12092b1" path="/var/lib/kubelet/pods/f6159738-132a-43a9-a072-4925e12092b1/volumes" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.942590 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerStarted","Data":"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97"} Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.943824 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.945337 4710 generic.go:334] "Generic (PLEG): container finished" podID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerID="df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" exitCode=0 Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.945373 4710 generic.go:334] "Generic (PLEG): container finished" podID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerID="790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" exitCode=143 Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.945514 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f7f9a021-79c5-43c1-8437-593690db947c" containerName="nova-scheduler-scheduler" 
containerID="cri-o://0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" gracePeriod=30 Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.945755 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.946218 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerDied","Data":"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b"} Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.946238 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerDied","Data":"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0"} Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.946249 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ae0c36e0-42d6-49e8-b9a4-bc41cc826169","Type":"ContainerDied","Data":"ee007cce8278101c0ae23ac3b6af3620f58db1b55c502c5e4dd5d812961a0756"} Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.946263 4710 scope.go:117] "RemoveContainer" containerID="df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.977910 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.926875721 podStartE2EDuration="6.977891428s" podCreationTimestamp="2025-10-09 09:22:10 +0000 UTC" firstStartedPulling="2025-10-09 09:22:11.699571941 +0000 UTC m=+1055.189680338" lastFinishedPulling="2025-10-09 09:22:15.750587648 +0000 UTC m=+1059.240696045" observedRunningTime="2025-10-09 09:22:16.974837098 +0000 UTC m=+1060.464945495" watchObservedRunningTime="2025-10-09 09:22:16.977891428 +0000 UTC m=+1060.467999825" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.988375 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.996296 4710 scope.go:117] "RemoveContainer" containerID="790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" Oct 09 09:22:16 crc kubenswrapper[4710]: I1009 09:22:16.996331 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.011749 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.012193 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-log" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012216 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-log" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.012234 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="dnsmasq-dns" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012241 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="dnsmasq-dns" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.012249 4710 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e80dad3d-99a6-42a9-8d55-c54f02dee2bd" containerName="nova-manage" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012257 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e80dad3d-99a6-42a9-8d55-c54f02dee2bd" containerName="nova-manage" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.012283 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="init" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012291 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="init" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.012298 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-metadata" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012305 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-metadata" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012522 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-log" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012537 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" containerName="nova-metadata-metadata" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012556 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e80dad3d-99a6-42a9-8d55-c54f02dee2bd" containerName="nova-manage" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.012568 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6159738-132a-43a9-a072-4925e12092b1" containerName="dnsmasq-dns" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.013540 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.018395 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.020748 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.022942 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.032736 4710 scope.go:117] "RemoveContainer" containerID="df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.037649 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b\": container with ID starting with df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b not found: ID does not exist" containerID="df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.037686 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b"} err="failed to get container status \"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b\": rpc error: code = NotFound desc = could not find container \"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b\": container with ID starting with df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b not found: ID does not exist" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.037716 4710 scope.go:117] "RemoveContainer" containerID="790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" Oct 09 09:22:17 crc kubenswrapper[4710]: E1009 09:22:17.039511 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0\": container with ID starting with 790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0 not found: ID does not exist" containerID="790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.039548 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0"} err="failed to get container status \"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0\": rpc error: code = NotFound desc = could not find container \"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0\": container with ID starting with 790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0 not found: ID does not exist" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.039571 4710 scope.go:117] "RemoveContainer" containerID="df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.040972 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b"} err="failed to get container status \"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b\": rpc error: 
code = NotFound desc = could not find container \"df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b\": container with ID starting with df8bfd702a77ba962820156a3389e76b8d06376ff16b14ddb020cee2c5ed606b not found: ID does not exist" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.041001 4710 scope.go:117] "RemoveContainer" containerID="790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.041203 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0"} err="failed to get container status \"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0\": rpc error: code = NotFound desc = could not find container \"790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0\": container with ID starting with 790bc9f8e1af9d8324bbb3e4cf29ec51847c8993a7b48081d0a786fc3f7a2df0 not found: ID does not exist" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.075753 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzvjk\" (UniqueName: \"kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.075806 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.075947 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.076078 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.076167 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.177574 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.177651 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.177685 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.177750 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzvjk\" (UniqueName: \"kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.178482 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.178882 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.181481 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.181556 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.183180 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.201619 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzvjk\" (UniqueName: \"kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk\") pod \"nova-metadata-0\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.208327 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.331675 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.775409 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.954895 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerStarted","Data":"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab"} Oct 09 09:22:17 crc kubenswrapper[4710]: I1009 09:22:17.955836 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerStarted","Data":"d94533e64c7773801048567f45a91ea240ea3d759730387ec82d73ab3bc4790a"} Oct 09 09:22:18 crc kubenswrapper[4710]: I1009 09:22:18.823478 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae0c36e0-42d6-49e8-b9a4-bc41cc826169" path="/var/lib/kubelet/pods/ae0c36e0-42d6-49e8-b9a4-bc41cc826169/volumes" Oct 09 09:22:18 crc kubenswrapper[4710]: E1009 09:22:18.955471 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:22:18 crc kubenswrapper[4710]: E1009 09:22:18.956665 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:22:18 crc kubenswrapper[4710]: E1009 09:22:18.957950 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:22:18 crc kubenswrapper[4710]: E1009 09:22:18.958008 4710 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f7f9a021-79c5-43c1-8437-593690db947c" containerName="nova-scheduler-scheduler" Oct 09 09:22:18 crc kubenswrapper[4710]: I1009 09:22:18.972525 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerStarted","Data":"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4"} Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.437158 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.455667 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.455647518 podStartE2EDuration="3.455647518s" podCreationTimestamp="2025-10-09 09:22:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:18.990699754 +0000 UTC m=+1062.480808151" watchObservedRunningTime="2025-10-09 09:22:19.455647518 +0000 UTC m=+1062.945755915" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.525376 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle\") pod \"f7f9a021-79c5-43c1-8437-593690db947c\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.525497 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data\") pod \"f7f9a021-79c5-43c1-8437-593690db947c\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.525527 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dns99\" (UniqueName: \"kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99\") pod \"f7f9a021-79c5-43c1-8437-593690db947c\" (UID: \"f7f9a021-79c5-43c1-8437-593690db947c\") " Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.530822 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99" (OuterVolumeSpecName: "kube-api-access-dns99") pod "f7f9a021-79c5-43c1-8437-593690db947c" (UID: "f7f9a021-79c5-43c1-8437-593690db947c"). InnerVolumeSpecName "kube-api-access-dns99". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.547208 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7f9a021-79c5-43c1-8437-593690db947c" (UID: "f7f9a021-79c5-43c1-8437-593690db947c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.547283 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data" (OuterVolumeSpecName: "config-data") pod "f7f9a021-79c5-43c1-8437-593690db947c" (UID: "f7f9a021-79c5-43c1-8437-593690db947c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.628100 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.628148 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7f9a021-79c5-43c1-8437-593690db947c-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.628158 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dns99\" (UniqueName: \"kubernetes.io/projected/f7f9a021-79c5-43c1-8437-593690db947c-kube-api-access-dns99\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.981977 4710 generic.go:334] "Generic (PLEG): container finished" podID="f7f9a021-79c5-43c1-8437-593690db947c" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" exitCode=0 Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.982070 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7f9a021-79c5-43c1-8437-593690db947c","Type":"ContainerDied","Data":"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957"} Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.982233 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f7f9a021-79c5-43c1-8437-593690db947c","Type":"ContainerDied","Data":"4651f26f06d5860fffdab9611eb8adc2ae5ff39bbd1d77df5372da4db771cef4"} Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.982119 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:19 crc kubenswrapper[4710]: I1009 09:22:19.982297 4710 scope.go:117] "RemoveContainer" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.009070 4710 scope.go:117] "RemoveContainer" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" Oct 09 09:22:20 crc kubenswrapper[4710]: E1009 09:22:20.009473 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957\": container with ID starting with 0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957 not found: ID does not exist" containerID="0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.009510 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957"} err="failed to get container status \"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957\": rpc error: code = NotFound desc = could not find container \"0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957\": container with ID starting with 0481b7ce97bf657d1d31efb2b88381bb45e0becdc9d77b7ee4eb56ea61e8c957 not found: ID does not exist" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.010617 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.017260 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.029391 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:20 crc kubenswrapper[4710]: E1009 09:22:20.029842 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7f9a021-79c5-43c1-8437-593690db947c" containerName="nova-scheduler-scheduler" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.029910 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7f9a021-79c5-43c1-8437-593690db947c" containerName="nova-scheduler-scheduler" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.030202 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7f9a021-79c5-43c1-8437-593690db947c" containerName="nova-scheduler-scheduler" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.030882 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.038962 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.040575 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.137921 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr7z8\" (UniqueName: \"kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.137997 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.138056 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.240383 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr7z8\" (UniqueName: \"kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.240646 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.240698 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.247989 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.248877 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.257844 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr7z8\" (UniqueName: 
\"kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8\") pod \"nova-scheduler-0\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.344026 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:22:20 crc kubenswrapper[4710]: E1009 09:22:20.747240 4710 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c6c2b3e_1323_4927_a3c7_bcd0dce561f6.slice/crio-4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c6c2b3e_1323_4927_a3c7_bcd0dce561f6.slice/crio-conmon-4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4.scope\": RecentStats: unable to find data in memory cache]" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.824242 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7f9a021-79c5-43c1-8437-593690db947c" path="/var/lib/kubelet/pods/f7f9a021-79c5-43c1-8437-593690db947c/volumes" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.864656 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.865608 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.960870 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs\") pod \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.961132 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g67fh\" (UniqueName: \"kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh\") pod \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.961262 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle\") pod \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.961312 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data\") pod \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\" (UID: \"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6\") " Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.961346 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs" (OuterVolumeSpecName: "logs") pod "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" (UID: "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.962035 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.971775 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh" (OuterVolumeSpecName: "kube-api-access-g67fh") pod "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" (UID: "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6"). InnerVolumeSpecName "kube-api-access-g67fh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.991334 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" (UID: "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.998261 4710 generic.go:334] "Generic (PLEG): container finished" podID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerID="4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4" exitCode=0 Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.998403 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerDied","Data":"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4"} Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.998934 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0c6c2b3e-1323-4927-a3c7-bcd0dce561f6","Type":"ContainerDied","Data":"30b4deeb76dca68cad8b4dcaa6ed00a0913af29e3307e80cc3bfc4b017ab4449"} Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.998959 4710 scope.go:117] "RemoveContainer" containerID="4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4" Oct 09 09:22:20 crc kubenswrapper[4710]: I1009 09:22:20.998553 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.002488 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16","Type":"ContainerStarted","Data":"aea94590a2d11d91599a22cc050fd640454332205cd8adff7fa1d2cf3f86e76e"} Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.007560 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data" (OuterVolumeSpecName: "config-data") pod "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" (UID: "0c6c2b3e-1323-4927-a3c7-bcd0dce561f6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.022086 4710 scope.go:117] "RemoveContainer" containerID="f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.040611 4710 scope.go:117] "RemoveContainer" containerID="4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4" Oct 09 09:22:21 crc kubenswrapper[4710]: E1009 09:22:21.041048 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4\": container with ID starting with 4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4 not found: ID does not exist" containerID="4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.041097 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4"} err="failed to get container status \"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4\": rpc error: code = NotFound desc = could not find container \"4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4\": container with ID starting with 4b27d03371d77cf40f5fa7d94f8231959167cb8b3bff1a45ccce1e9e6bccbbc4 not found: ID does not exist" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.041134 4710 scope.go:117] "RemoveContainer" containerID="f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f" Oct 09 09:22:21 crc kubenswrapper[4710]: E1009 09:22:21.041692 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f\": container with ID starting with f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f not found: ID does not exist" containerID="f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.041734 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f"} err="failed to get container status \"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f\": rpc error: code = NotFound desc = could not find container \"f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f\": container with ID starting with f546ded406e305f2038d1b7b0ef026dfc164883c90815e8bffbe73924ddb988f not found: ID does not exist" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.064024 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g67fh\" (UniqueName: \"kubernetes.io/projected/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-kube-api-access-g67fh\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.064055 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.064066 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:21 crc 
kubenswrapper[4710]: I1009 09:22:21.327241 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.334639 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.352532 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:21 crc kubenswrapper[4710]: E1009 09:22:21.353007 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-api" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.353029 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-api" Oct 09 09:22:21 crc kubenswrapper[4710]: E1009 09:22:21.353047 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-log" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.353054 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-log" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.353271 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-api" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.353299 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" containerName="nova-api-log" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.354593 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.361196 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.375808 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.470788 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.470851 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.471186 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbnmj\" (UniqueName: \"kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.471227 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" 
Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.572751 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.572814 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.572883 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbnmj\" (UniqueName: \"kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.572911 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.573159 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.578125 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.586562 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.587125 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbnmj\" (UniqueName: \"kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj\") pod \"nova-api-0\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " pod="openstack/nova-api-0" Oct 09 09:22:21 crc kubenswrapper[4710]: I1009 09:22:21.667670 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.016169 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16","Type":"ContainerStarted","Data":"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d"} Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.029631 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.029604897 podStartE2EDuration="2.029604897s" podCreationTimestamp="2025-10-09 09:22:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:22.028234613 +0000 UTC m=+1065.518343010" watchObservedRunningTime="2025-10-09 09:22:22.029604897 +0000 UTC m=+1065.519713295" Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.091635 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.332063 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.332281 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 09:22:22 crc kubenswrapper[4710]: I1009 09:22:22.824574 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c6c2b3e-1323-4927-a3c7-bcd0dce561f6" path="/var/lib/kubelet/pods/0c6c2b3e-1323-4927-a3c7-bcd0dce561f6/volumes" Oct 09 09:22:23 crc kubenswrapper[4710]: I1009 09:22:23.028709 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerStarted","Data":"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9"} Oct 09 09:22:23 crc kubenswrapper[4710]: I1009 09:22:23.028786 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerStarted","Data":"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56"} Oct 09 09:22:23 crc kubenswrapper[4710]: I1009 09:22:23.028799 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerStarted","Data":"34a25ee35e4afb68e7ba90b05999850c478f030b98e2fc5f43f3106e2b134489"} Oct 09 09:22:23 crc kubenswrapper[4710]: I1009 09:22:23.063053 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.063025636 podStartE2EDuration="2.063025636s" podCreationTimestamp="2025-10-09 09:22:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:23.04909686 +0000 UTC m=+1066.539205258" watchObservedRunningTime="2025-10-09 09:22:23.063025636 +0000 UTC m=+1066.553134033" Oct 09 09:22:25 crc kubenswrapper[4710]: I1009 09:22:25.344571 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 09:22:27 crc kubenswrapper[4710]: I1009 09:22:27.332072 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 09:22:27 crc kubenswrapper[4710]: I1009 09:22:27.332356 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Oct 09 09:22:28 crc kubenswrapper[4710]: I1009 09:22:28.349585 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 09:22:28 crc kubenswrapper[4710]: I1009 09:22:28.349592 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 09:22:30 crc kubenswrapper[4710]: I1009 09:22:30.344141 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 09:22:30 crc kubenswrapper[4710]: I1009 09:22:30.366363 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 09:22:31 crc kubenswrapper[4710]: I1009 09:22:31.116185 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 09:22:31 crc kubenswrapper[4710]: I1009 09:22:31.668087 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:22:31 crc kubenswrapper[4710]: I1009 09:22:31.668143 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:22:32 crc kubenswrapper[4710]: I1009 09:22:32.750637 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 09:22:32 crc kubenswrapper[4710]: I1009 09:22:32.751047 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 09 09:22:35 crc kubenswrapper[4710]: I1009 09:22:35.545626 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:22:35 crc kubenswrapper[4710]: I1009 09:22:35.545709 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:22:37 crc kubenswrapper[4710]: I1009 09:22:37.337496 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 09:22:37 crc kubenswrapper[4710]: I1009 09:22:37.343382 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 09:22:37 crc kubenswrapper[4710]: I1009 09:22:37.343911 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-metadata-0" Oct 09 09:22:38 crc kubenswrapper[4710]: I1009 09:22:38.158056 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.079880 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.170110 4710 generic.go:334] "Generic (PLEG): container finished" podID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" containerID="775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554" exitCode=137 Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.170175 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e5a3792-1713-4d38-8bf3-ee149ed43e7d","Type":"ContainerDied","Data":"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554"} Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.170233 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.170259 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1e5a3792-1713-4d38-8bf3-ee149ed43e7d","Type":"ContainerDied","Data":"2864cb4e26c5afb0ed5beacf4aa86e8882565a35adff956d2b52ad6ef879f5ff"} Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.170287 4710 scope.go:117] "RemoveContainer" containerID="775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.191489 4710 scope.go:117] "RemoveContainer" containerID="775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554" Oct 09 09:22:39 crc kubenswrapper[4710]: E1009 09:22:39.191850 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554\": container with ID starting with 775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554 not found: ID does not exist" containerID="775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.191902 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554"} err="failed to get container status \"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554\": rpc error: code = NotFound desc = could not find container \"775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554\": container with ID starting with 775e0500c9f4dc8dfdf57d990b0af4bd4e00cfcdd06b328f23ed8a5fed6ed554 not found: ID does not exist" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.230470 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data\") pod \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.230680 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qqp4\" (UniqueName: \"kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4\") pod \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\" (UID: 
\"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.230941 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") pod \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.242371 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4" (OuterVolumeSpecName: "kube-api-access-4qqp4") pod "1e5a3792-1713-4d38-8bf3-ee149ed43e7d" (UID: "1e5a3792-1713-4d38-8bf3-ee149ed43e7d"). InnerVolumeSpecName "kube-api-access-4qqp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:39 crc kubenswrapper[4710]: E1009 09:22:39.252211 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle podName:1e5a3792-1713-4d38-8bf3-ee149ed43e7d nodeName:}" failed. No retries permitted until 2025-10-09 09:22:39.752180894 +0000 UTC m=+1083.242289291 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle") pod "1e5a3792-1713-4d38-8bf3-ee149ed43e7d" (UID: "1e5a3792-1713-4d38-8bf3-ee149ed43e7d") : error deleting /var/lib/kubelet/pods/1e5a3792-1713-4d38-8bf3-ee149ed43e7d/volume-subpaths: remove /var/lib/kubelet/pods/1e5a3792-1713-4d38-8bf3-ee149ed43e7d/volume-subpaths: no such file or directory Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.254847 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data" (OuterVolumeSpecName: "config-data") pod "1e5a3792-1713-4d38-8bf3-ee149ed43e7d" (UID: "1e5a3792-1713-4d38-8bf3-ee149ed43e7d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.333988 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.334028 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qqp4\" (UniqueName: \"kubernetes.io/projected/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-kube-api-access-4qqp4\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.841768 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") pod \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\" (UID: \"1e5a3792-1713-4d38-8bf3-ee149ed43e7d\") " Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.857885 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e5a3792-1713-4d38-8bf3-ee149ed43e7d" (UID: "1e5a3792-1713-4d38-8bf3-ee149ed43e7d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:39 crc kubenswrapper[4710]: I1009 09:22:39.944462 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e5a3792-1713-4d38-8bf3-ee149ed43e7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.099201 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.104684 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.152696 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:40 crc kubenswrapper[4710]: E1009 09:22:40.153822 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.153847 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.154369 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" containerName="nova-cell1-novncproxy-novncproxy" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.155848 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.160512 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.160750 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.164666 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.178277 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.257698 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.257741 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.257822 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc 
kubenswrapper[4710]: I1009 09:22:40.257894 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.257923 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdppg\" (UniqueName: \"kubernetes.io/projected/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-kube-api-access-gdppg\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.359386 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.359497 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.359573 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.359609 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdppg\" (UniqueName: \"kubernetes.io/projected/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-kube-api-access-gdppg\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.359673 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.364327 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.364713 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.366567 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.371117 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.373444 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdppg\" (UniqueName: \"kubernetes.io/projected/b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4-kube-api-access-gdppg\") pod \"nova-cell1-novncproxy-0\" (UID: \"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4\") " pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.485371 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.802678 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 09 09:22:40 crc kubenswrapper[4710]: I1009 09:22:40.834167 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e5a3792-1713-4d38-8bf3-ee149ed43e7d" path="/var/lib/kubelet/pods/1e5a3792-1713-4d38-8bf3-ee149ed43e7d/volumes" Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.193235 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4","Type":"ContainerStarted","Data":"0599cef77979a866b3c40db05da64a23acc1d906628fe60a037a1b487049fe80"} Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.193453 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4","Type":"ContainerStarted","Data":"5dc28ded98f24f0c696ccc900e0d2a0dc61ca09fb0b150735f8479e8f28e0dc8"} Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.215181 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.215166488 podStartE2EDuration="1.215166488s" podCreationTimestamp="2025-10-09 09:22:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:41.207046106 +0000 UTC m=+1084.697154503" watchObservedRunningTime="2025-10-09 09:22:41.215166488 +0000 UTC m=+1084.705274886" Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.266706 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.673217 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.673754 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 09:22:41 crc kubenswrapper[4710]: I1009 09:22:41.674652 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 09:22:41 crc kubenswrapper[4710]: 
I1009 09:22:41.677969 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.201549 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.207674 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.413105 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.414877 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.443618 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.515196 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.515639 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.515696 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.520807 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.521050 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c979g\" (UniqueName: \"kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.623303 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c979g\" (UniqueName: \"kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.623645 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.623766 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.623856 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.623984 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.624667 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.624857 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.624927 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.625404 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.653483 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c979g\" (UniqueName: \"kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g\") pod \"dnsmasq-dns-665946c669-kdpds\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:42 crc kubenswrapper[4710]: I1009 09:22:42.737339 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:43 crc kubenswrapper[4710]: I1009 09:22:43.217592 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.218655 4710 generic.go:334] "Generic (PLEG): container finished" podID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerID="6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f" exitCode=0 Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.218734 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665946c669-kdpds" event={"ID":"b304fcec-3561-4932-b04d-5e04c64fbc7c","Type":"ContainerDied","Data":"6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f"} Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.218974 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665946c669-kdpds" event={"ID":"b304fcec-3561-4932-b04d-5e04c64fbc7c","Type":"ContainerStarted","Data":"f43017271c562b71bd62fba85ba9a4801208f131081d39e4a6f537ea3de5892c"} Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.878652 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.879172 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-central-agent" containerID="cri-o://b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb" gracePeriod=30 Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.879647 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="proxy-httpd" containerID="cri-o://4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97" gracePeriod=30 Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.879711 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="sg-core" containerID="cri-o://65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340" gracePeriod=30 Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.880992 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-notification-agent" containerID="cri-o://c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f" gracePeriod=30 Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.902509 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:44 crc kubenswrapper[4710]: I1009 09:22:44.987540 4710 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pode80dad3d-99a6-42a9-8d55-c54f02dee2bd"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pode80dad3d-99a6-42a9-8d55-c54f02dee2bd] : Timed out while waiting for systemd to remove kubepods-besteffort-pode80dad3d_99a6_42a9_8d55_c54f02dee2bd.slice" Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.227909 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665946c669-kdpds" 
event={"ID":"b304fcec-3561-4932-b04d-5e04c64fbc7c","Type":"ContainerStarted","Data":"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3"} Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.229092 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231220 4710 generic.go:334] "Generic (PLEG): container finished" podID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerID="4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97" exitCode=0 Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231245 4710 generic.go:334] "Generic (PLEG): container finished" podID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerID="65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340" exitCode=2 Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231378 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-log" containerID="cri-o://9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56" gracePeriod=30 Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231642 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerDied","Data":"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97"} Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231669 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerDied","Data":"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340"} Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.231723 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-api" containerID="cri-o://2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9" gracePeriod=30 Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.251751 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-665946c669-kdpds" podStartSLOduration=3.251741074 podStartE2EDuration="3.251741074s" podCreationTimestamp="2025-10-09 09:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:45.250082085 +0000 UTC m=+1088.740190482" watchObservedRunningTime="2025-10-09 09:22:45.251741074 +0000 UTC m=+1088.741849471" Oct 09 09:22:45 crc kubenswrapper[4710]: I1009 09:22:45.485521 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:46 crc kubenswrapper[4710]: I1009 09:22:46.246564 4710 generic.go:334] "Generic (PLEG): container finished" podID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerID="9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56" exitCode=143 Oct 09 09:22:46 crc kubenswrapper[4710]: I1009 09:22:46.246653 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerDied","Data":"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56"} Oct 09 09:22:46 crc kubenswrapper[4710]: I1009 09:22:46.251388 4710 generic.go:334] "Generic (PLEG): container finished" 
podID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerID="b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb" exitCode=0 Oct 09 09:22:46 crc kubenswrapper[4710]: I1009 09:22:46.251488 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerDied","Data":"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb"} Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.249393 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.250208 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" containerName="kube-state-metrics" containerID="cri-o://978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83" gracePeriod=30 Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.722625 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.791871 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.877614 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vkk7\" (UniqueName: \"kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7\") pod \"dd02e6a1-01c6-402c-bebb-b34fde77ce36\" (UID: \"dd02e6a1-01c6-402c-bebb-b34fde77ce36\") " Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.890691 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7" (OuterVolumeSpecName: "kube-api-access-6vkk7") pod "dd02e6a1-01c6-402c-bebb-b34fde77ce36" (UID: "dd02e6a1-01c6-402c-bebb-b34fde77ce36"). InnerVolumeSpecName "kube-api-access-6vkk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.980094 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data\") pod \"52a99cb4-bb33-4b66-b424-abb4af7c180f\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.980378 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs\") pod \"52a99cb4-bb33-4b66-b424-abb4af7c180f\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.980467 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbnmj\" (UniqueName: \"kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj\") pod \"52a99cb4-bb33-4b66-b424-abb4af7c180f\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.980515 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle\") pod \"52a99cb4-bb33-4b66-b424-abb4af7c180f\" (UID: \"52a99cb4-bb33-4b66-b424-abb4af7c180f\") " Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.980800 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs" (OuterVolumeSpecName: "logs") pod "52a99cb4-bb33-4b66-b424-abb4af7c180f" (UID: "52a99cb4-bb33-4b66-b424-abb4af7c180f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.981487 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vkk7\" (UniqueName: \"kubernetes.io/projected/dd02e6a1-01c6-402c-bebb-b34fde77ce36-kube-api-access-6vkk7\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.981509 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52a99cb4-bb33-4b66-b424-abb4af7c180f-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:48 crc kubenswrapper[4710]: I1009 09:22:48.986553 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj" (OuterVolumeSpecName: "kube-api-access-nbnmj") pod "52a99cb4-bb33-4b66-b424-abb4af7c180f" (UID: "52a99cb4-bb33-4b66-b424-abb4af7c180f"). InnerVolumeSpecName "kube-api-access-nbnmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.003952 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data" (OuterVolumeSpecName: "config-data") pod "52a99cb4-bb33-4b66-b424-abb4af7c180f" (UID: "52a99cb4-bb33-4b66-b424-abb4af7c180f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.007005 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52a99cb4-bb33-4b66-b424-abb4af7c180f" (UID: "52a99cb4-bb33-4b66-b424-abb4af7c180f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.084235 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbnmj\" (UniqueName: \"kubernetes.io/projected/52a99cb4-bb33-4b66-b424-abb4af7c180f-kube-api-access-nbnmj\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.084280 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.084297 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52a99cb4-bb33-4b66-b424-abb4af7c180f-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.319194 4710 generic.go:334] "Generic (PLEG): container finished" podID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerID="2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9" exitCode=0 Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.319314 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerDied","Data":"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9"} Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.319348 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52a99cb4-bb33-4b66-b424-abb4af7c180f","Type":"ContainerDied","Data":"34a25ee35e4afb68e7ba90b05999850c478f030b98e2fc5f43f3106e2b134489"} Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.319391 4710 scope.go:117] "RemoveContainer" containerID="2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.319578 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.323512 4710 generic.go:334] "Generic (PLEG): container finished" podID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" containerID="978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83" exitCode=2 Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.323561 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd02e6a1-01c6-402c-bebb-b34fde77ce36","Type":"ContainerDied","Data":"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83"} Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.323634 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"dd02e6a1-01c6-402c-bebb-b34fde77ce36","Type":"ContainerDied","Data":"33acd33ce3caee2780d890ade8f95805599056134e45d99173e03bfb9203acff"} Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.323720 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.348121 4710 scope.go:117] "RemoveContainer" containerID="9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.372578 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.388402 4710 scope.go:117] "RemoveContainer" containerID="2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9" Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.390801 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9\": container with ID starting with 2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9 not found: ID does not exist" containerID="2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.390841 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9"} err="failed to get container status \"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9\": rpc error: code = NotFound desc = could not find container \"2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9\": container with ID starting with 2f80d0367ac837f6cdd42e998411157751a4ce960aa777542c22751f057b39f9 not found: ID does not exist" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.390874 4710 scope.go:117] "RemoveContainer" containerID="9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.391106 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.391446 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56\": container with ID starting with 9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56 not found: ID does not exist" containerID="9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.391497 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56"} err="failed to get container status \"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56\": rpc error: code = NotFound desc = could not find container \"9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56\": container with ID starting with 9be7ee728f2a4220e06002c9e8b95ee39d5b530274a34d5b5ff41bffef8e0f56 not found: ID does not exist" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.391526 4710 scope.go:117] "RemoveContainer" containerID="978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.400353 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.400823 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-log" Oct 09 09:22:49 crc 
kubenswrapper[4710]: I1009 09:22:49.400841 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-log" Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.400863 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" containerName="kube-state-metrics" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.400870 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" containerName="kube-state-metrics" Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.400897 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-api" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.400903 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-api" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.401182 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-api" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.401203 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" containerName="kube-state-metrics" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.401211 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" containerName="nova-api-log" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.402374 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.409151 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.409357 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.409567 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.422212 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.434761 4710 scope.go:117] "RemoveContainer" containerID="978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83" Oct 09 09:22:49 crc kubenswrapper[4710]: E1009 09:22:49.435150 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83\": container with ID starting with 978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83 not found: ID does not exist" containerID="978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.435182 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83"} err="failed to get container status \"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83\": rpc error: code = NotFound desc = could not find container \"978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83\": container with ID starting with 
978bc13b627fae0d01b18b5a1941c5eb70c54189ee76bb959345571defc0fa83 not found: ID does not exist" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.442004 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.451213 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.461114 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.462637 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.464900 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.466019 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.471375 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.495827 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496081 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496145 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvrmd\" (UniqueName: \"kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496228 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496366 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496403 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs\") pod \"nova-api-0\" (UID: 
\"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496453 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496500 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496626 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdvfj\" (UniqueName: \"kubernetes.io/projected/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-api-access-zdvfj\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.496652 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.598883 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599006 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599035 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvrmd\" (UniqueName: \"kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599059 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599102 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599124 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599143 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599165 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599199 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdvfj\" (UniqueName: \"kubernetes.io/projected/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-api-access-zdvfj\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.599215 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.601140 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.604986 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.606978 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.609665 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.610076 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.612656 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.617480 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.625183 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvrmd\" (UniqueName: \"kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd\") pod \"nova-api-0\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.630577 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.633751 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdvfj\" (UniqueName: \"kubernetes.io/projected/5f2955ba-a1b4-4cad-8c8b-35d74d914474-kube-api-access-zdvfj\") pod \"kube-state-metrics-0\" (UID: \"5f2955ba-a1b4-4cad-8c8b-35d74d914474\") " pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.742671 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.784136 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 09 09:22:49 crc kubenswrapper[4710]: I1009 09:22:49.947179 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013356 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013420 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013481 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013560 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013587 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkq22\" (UniqueName: \"kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013671 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.013699 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data\") pod \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\" (UID: \"f6e0a2a9-9a61-4202-a6e8-34ea5d035533\") " Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.014932 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.015569 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.020640 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts" (OuterVolumeSpecName: "scripts") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.023478 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22" (OuterVolumeSpecName: "kube-api-access-gkq22") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "kube-api-access-gkq22". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.076462 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.117717 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.117754 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.117769 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.117778 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.117790 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkq22\" (UniqueName: \"kubernetes.io/projected/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-kube-api-access-gkq22\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.145699 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.149896 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data" (OuterVolumeSpecName: "config-data") pod "f6e0a2a9-9a61-4202-a6e8-34ea5d035533" (UID: "f6e0a2a9-9a61-4202-a6e8-34ea5d035533"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.220840 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.221142 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6e0a2a9-9a61-4202-a6e8-34ea5d035533-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.302308 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.338348 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerStarted","Data":"432db8822bfc5be561effd6c334352b6f18e91c0adb824d921bac680a5263297"} Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.341909 4710 generic.go:334] "Generic (PLEG): container finished" podID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerID="c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f" exitCode=0 Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.341961 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerDied","Data":"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f"} Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.341995 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f6e0a2a9-9a61-4202-a6e8-34ea5d035533","Type":"ContainerDied","Data":"8273678c1e002d63f66f907c246168823901a228de5a39ec3fa87d5f302afec6"} Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.342032 4710 scope.go:117] "RemoveContainer" containerID="4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.342039 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.369052 4710 scope.go:117] "RemoveContainer" containerID="65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.424152 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: W1009 09:22:50.437662 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2955ba_a1b4_4cad_8c8b_35d74d914474.slice/crio-ab25613ea916a90bcce2fb34e15722204c108373d7857e56d28e35ee926e9141 WatchSource:0}: Error finding container ab25613ea916a90bcce2fb34e15722204c108373d7857e56d28e35ee926e9141: Status 404 returned error can't find the container with id ab25613ea916a90bcce2fb34e15722204c108373d7857e56d28e35ee926e9141 Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.478098 4710 scope.go:117] "RemoveContainer" containerID="c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.486361 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.500173 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.507070 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.517362 4710 scope.go:117] "RemoveContainer" containerID="b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.530168 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.531635 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="proxy-httpd" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531658 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="proxy-httpd" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.531676 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="sg-core" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531683 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="sg-core" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.531699 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-notification-agent" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531706 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-notification-agent" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.531715 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-central-agent" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531721 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-central-agent" Oct 09 09:22:50 crc 
kubenswrapper[4710]: I1009 09:22:50.531916 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-notification-agent" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531937 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="sg-core" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531947 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="proxy-httpd" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.531959 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" containerName="ceilometer-central-agent" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.534780 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.537510 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.551641 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.551862 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.558825 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.561184 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.596541 4710 scope.go:117] "RemoveContainer" containerID="4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.601492 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97\": container with ID starting with 4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97 not found: ID does not exist" containerID="4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.601528 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97"} err="failed to get container status \"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97\": rpc error: code = NotFound desc = could not find container \"4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97\": container with ID starting with 4c9d4e523c69561e27f201a6926ed6c7dd788c165f8b788ea9d687942bfc2f97 not found: ID does not exist" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.601554 4710 scope.go:117] "RemoveContainer" containerID="65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.603520 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340\": container with ID starting with 
65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340 not found: ID does not exist" containerID="65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.603554 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340"} err="failed to get container status \"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340\": rpc error: code = NotFound desc = could not find container \"65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340\": container with ID starting with 65bd2074ea941d8e5a32f9147fd9fd83d5a8741e5b37c64d2dee2bf5679be340 not found: ID does not exist" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.603575 4710 scope.go:117] "RemoveContainer" containerID="c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.604570 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f\": container with ID starting with c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f not found: ID does not exist" containerID="c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.604607 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f"} err="failed to get container status \"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f\": rpc error: code = NotFound desc = could not find container \"c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f\": container with ID starting with c1b4461e4e77e572b0dc308bd0e19e344db35200713ae1c650af59a4fd2b775f not found: ID does not exist" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.604636 4710 scope.go:117] "RemoveContainer" containerID="b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb" Oct 09 09:22:50 crc kubenswrapper[4710]: E1009 09:22:50.608535 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb\": container with ID starting with b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb not found: ID does not exist" containerID="b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.608565 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb"} err="failed to get container status \"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb\": rpc error: code = NotFound desc = could not find container \"b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb\": container with ID starting with b906b329e0ea342b9e4d9e0ec203441ecc5add0b02d387061750835487e286bb not found: ID does not exist" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631523 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts\") pod \"ceilometer-0\" (UID: 
\"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631572 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631593 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bxbx\" (UniqueName: \"kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631636 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631658 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631677 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631786 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.631816 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.733717 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734518 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734719 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734762 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734801 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bxbx\" (UniqueName: \"kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734845 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734886 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734907 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.734987 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.737077 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.740450 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.740824 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.741023 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.741748 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.745983 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.760490 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bxbx\" (UniqueName: \"kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx\") pod \"ceilometer-0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " pod="openstack/ceilometer-0" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.833473 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52a99cb4-bb33-4b66-b424-abb4af7c180f" path="/var/lib/kubelet/pods/52a99cb4-bb33-4b66-b424-abb4af7c180f/volumes" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.834098 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd02e6a1-01c6-402c-bebb-b34fde77ce36" path="/var/lib/kubelet/pods/dd02e6a1-01c6-402c-bebb-b34fde77ce36/volumes" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.835954 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e0a2a9-9a61-4202-a6e8-34ea5d035533" path="/var/lib/kubelet/pods/f6e0a2a9-9a61-4202-a6e8-34ea5d035533/volumes" Oct 09 09:22:50 crc kubenswrapper[4710]: I1009 09:22:50.851572 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:22:51 crc kubenswrapper[4710]: W1009 09:22:51.303901 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf8f55b62_b62b_4571_bee9_8af830b6f8f0.slice/crio-c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0 WatchSource:0}: Error finding container c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0: Status 404 returned error can't find the container with id c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0 Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.304649 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.352820 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerStarted","Data":"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f"} Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.352862 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerStarted","Data":"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607"} Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.355152 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f2955ba-a1b4-4cad-8c8b-35d74d914474","Type":"ContainerStarted","Data":"6c05c6146ce5697d03818a4cf28898a051f2db5f72e96a6518b9965a00c98f05"} Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.355178 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f2955ba-a1b4-4cad-8c8b-35d74d914474","Type":"ContainerStarted","Data":"ab25613ea916a90bcce2fb34e15722204c108373d7857e56d28e35ee926e9141"} Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.355438 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.356789 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerStarted","Data":"c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0"} Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.374306 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.384903 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.3848830420000002 podStartE2EDuration="2.384883042s" podCreationTimestamp="2025-10-09 09:22:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:51.377942153 +0000 UTC m=+1094.868050551" watchObservedRunningTime="2025-10-09 09:22:51.384883042 +0000 UTC m=+1094.874991439" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.426256 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.134256558 podStartE2EDuration="2.426238146s" podCreationTimestamp="2025-10-09 09:22:49 +0000 UTC" firstStartedPulling="2025-10-09 09:22:50.445994416 +0000 UTC m=+1093.936102812" 
lastFinishedPulling="2025-10-09 09:22:50.737976002 +0000 UTC m=+1094.228084400" observedRunningTime="2025-10-09 09:22:51.411147239 +0000 UTC m=+1094.901255636" watchObservedRunningTime="2025-10-09 09:22:51.426238146 +0000 UTC m=+1094.916346544" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.529180 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-tlfsf"] Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.530737 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.537332 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.537686 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.557226 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.557275 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kppq8\" (UniqueName: \"kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.557354 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.557403 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.570480 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-tlfsf"] Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.659333 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.659388 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kppq8\" (UniqueName: \"kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc 
kubenswrapper[4710]: I1009 09:22:51.659464 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.659498 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.665980 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.666343 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.672937 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.675353 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kppq8\" (UniqueName: \"kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8\") pod \"nova-cell1-cell-mapping-tlfsf\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:51 crc kubenswrapper[4710]: I1009 09:22:51.864216 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:52 crc kubenswrapper[4710]: I1009 09:22:52.369319 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerStarted","Data":"8609a1b6561465621721783889e5843e5fa6e6aff66724c5e99f03c8c5c0bef5"} Oct 09 09:22:52 crc kubenswrapper[4710]: I1009 09:22:52.403173 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-tlfsf"] Oct 09 09:22:52 crc kubenswrapper[4710]: W1009 09:22:52.404744 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99754569_4c4f_4219_8aad_d9491e9117b5.slice/crio-5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02 WatchSource:0}: Error finding container 5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02: Status 404 returned error can't find the container with id 5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02 Oct 09 09:22:52 crc kubenswrapper[4710]: I1009 09:22:52.739487 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:22:52 crc kubenswrapper[4710]: I1009 09:22:52.814631 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:52 crc kubenswrapper[4710]: I1009 09:22:52.815036 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="dnsmasq-dns" containerID="cri-o://be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f" gracePeriod=10 Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.252354 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.331014 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb\") pod \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.331188 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc\") pod \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.331223 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr55m\" (UniqueName: \"kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m\") pod \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.331302 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb\") pod \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.331477 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config\") pod \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\" (UID: \"f5fb6ddf-08c7-4cd5-acaf-338e55a82730\") " Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.340742 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m" (OuterVolumeSpecName: "kube-api-access-rr55m") pod "f5fb6ddf-08c7-4cd5-acaf-338e55a82730" (UID: "f5fb6ddf-08c7-4cd5-acaf-338e55a82730"). InnerVolumeSpecName "kube-api-access-rr55m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.373998 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f5fb6ddf-08c7-4cd5-acaf-338e55a82730" (UID: "f5fb6ddf-08c7-4cd5-acaf-338e55a82730"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.378790 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f5fb6ddf-08c7-4cd5-acaf-338e55a82730" (UID: "f5fb6ddf-08c7-4cd5-acaf-338e55a82730"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.388744 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerStarted","Data":"9193356d85269ddeb233452187c77556505b3e97645cc1c6b911a04df1eb8f90"} Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.402105 4710 generic.go:334] "Generic (PLEG): container finished" podID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerID="be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f" exitCode=0 Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.402251 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.402256 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" event={"ID":"f5fb6ddf-08c7-4cd5-acaf-338e55a82730","Type":"ContainerDied","Data":"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f"} Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.402409 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75fb48c489-59xpd" event={"ID":"f5fb6ddf-08c7-4cd5-acaf-338e55a82730","Type":"ContainerDied","Data":"b9b982255ede11a6ac6df5ad95d7258a48cc6166225a5af65c40d4ef876c424a"} Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.402610 4710 scope.go:117] "RemoveContainer" containerID="be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.403164 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f5fb6ddf-08c7-4cd5-acaf-338e55a82730" (UID: "f5fb6ddf-08c7-4cd5-acaf-338e55a82730"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.404229 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tlfsf" event={"ID":"99754569-4c4f-4219-8aad-d9491e9117b5","Type":"ContainerStarted","Data":"82cc46c655a1d76bded7a5c79b4b92b554d6c98650edb09aa90c984b3b38d25a"} Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.404297 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tlfsf" event={"ID":"99754569-4c4f-4219-8aad-d9491e9117b5","Type":"ContainerStarted","Data":"5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02"} Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.419935 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config" (OuterVolumeSpecName: "config") pod "f5fb6ddf-08c7-4cd5-acaf-338e55a82730" (UID: "f5fb6ddf-08c7-4cd5-acaf-338e55a82730"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.450460 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.450485 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.450496 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.450513 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr55m\" (UniqueName: \"kubernetes.io/projected/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-kube-api-access-rr55m\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.450522 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f5fb6ddf-08c7-4cd5-acaf-338e55a82730-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.484691 4710 scope.go:117] "RemoveContainer" containerID="0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.519854 4710 scope.go:117] "RemoveContainer" containerID="be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f" Oct 09 09:22:53 crc kubenswrapper[4710]: E1009 09:22:53.521188 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f\": container with ID starting with be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f not found: ID does not exist" containerID="be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.521219 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f"} err="failed to get container status \"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f\": rpc error: code = NotFound desc = could not find container \"be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f\": container with ID starting with be471bdc411d4bc229058c8db7483054becb153581fd41590d5f8e0e4106a89f not found: ID does not exist" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.521241 4710 scope.go:117] "RemoveContainer" containerID="0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626" Oct 09 09:22:53 crc kubenswrapper[4710]: E1009 09:22:53.521594 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626\": container with ID starting with 0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626 not found: ID does not exist" containerID="0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.521617 4710 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626"} err="failed to get container status \"0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626\": rpc error: code = NotFound desc = could not find container \"0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626\": container with ID starting with 0eb8fa0aa64e9067bc6082629f296ce99bce32193f7cf8126b002789b3311626 not found: ID does not exist" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.729630 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-tlfsf" podStartSLOduration=2.729603463 podStartE2EDuration="2.729603463s" podCreationTimestamp="2025-10-09 09:22:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:22:53.439932704 +0000 UTC m=+1096.930041101" watchObservedRunningTime="2025-10-09 09:22:53.729603463 +0000 UTC m=+1097.219711850" Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.736779 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:53 crc kubenswrapper[4710]: I1009 09:22:53.746847 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75fb48c489-59xpd"] Oct 09 09:22:54 crc kubenswrapper[4710]: I1009 09:22:54.827064 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" path="/var/lib/kubelet/pods/f5fb6ddf-08c7-4cd5-acaf-338e55a82730/volumes" Oct 09 09:22:55 crc kubenswrapper[4710]: I1009 09:22:55.458139 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerStarted","Data":"790eafc42b4544f6036f7ca62edac0dc58b89a6318dd95ba82d5755143bfcec5"} Oct 09 09:22:57 crc kubenswrapper[4710]: I1009 09:22:57.483463 4710 generic.go:334] "Generic (PLEG): container finished" podID="99754569-4c4f-4219-8aad-d9491e9117b5" containerID="82cc46c655a1d76bded7a5c79b4b92b554d6c98650edb09aa90c984b3b38d25a" exitCode=0 Oct 09 09:22:57 crc kubenswrapper[4710]: I1009 09:22:57.483552 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tlfsf" event={"ID":"99754569-4c4f-4219-8aad-d9491e9117b5","Type":"ContainerDied","Data":"82cc46c655a1d76bded7a5c79b4b92b554d6c98650edb09aa90c984b3b38d25a"} Oct 09 09:22:57 crc kubenswrapper[4710]: I1009 09:22:57.489586 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerStarted","Data":"cbfec75f5679b7e131e87c217ceb79f5091c1c68b8de8f71af2f81bc1b74eb55"} Oct 09 09:22:57 crc kubenswrapper[4710]: I1009 09:22:57.490112 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:22:57 crc kubenswrapper[4710]: I1009 09:22:57.526512 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.521996186 podStartE2EDuration="7.526490915s" podCreationTimestamp="2025-10-09 09:22:50 +0000 UTC" firstStartedPulling="2025-10-09 09:22:51.306298954 +0000 UTC m=+1094.796407350" lastFinishedPulling="2025-10-09 09:22:56.310793682 +0000 UTC m=+1099.800902079" observedRunningTime="2025-10-09 09:22:57.520834497 +0000 UTC m=+1101.010942885" watchObservedRunningTime="2025-10-09 09:22:57.526490915 +0000 UTC 
m=+1101.016599312" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.828529 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.885258 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts\") pod \"99754569-4c4f-4219-8aad-d9491e9117b5\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.885339 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data\") pod \"99754569-4c4f-4219-8aad-d9491e9117b5\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.885397 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle\") pod \"99754569-4c4f-4219-8aad-d9491e9117b5\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.885660 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kppq8\" (UniqueName: \"kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8\") pod \"99754569-4c4f-4219-8aad-d9491e9117b5\" (UID: \"99754569-4c4f-4219-8aad-d9491e9117b5\") " Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.891598 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8" (OuterVolumeSpecName: "kube-api-access-kppq8") pod "99754569-4c4f-4219-8aad-d9491e9117b5" (UID: "99754569-4c4f-4219-8aad-d9491e9117b5"). InnerVolumeSpecName "kube-api-access-kppq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.906917 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts" (OuterVolumeSpecName: "scripts") pod "99754569-4c4f-4219-8aad-d9491e9117b5" (UID: "99754569-4c4f-4219-8aad-d9491e9117b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.908790 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99754569-4c4f-4219-8aad-d9491e9117b5" (UID: "99754569-4c4f-4219-8aad-d9491e9117b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.911075 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data" (OuterVolumeSpecName: "config-data") pod "99754569-4c4f-4219-8aad-d9491e9117b5" (UID: "99754569-4c4f-4219-8aad-d9491e9117b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.987325 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kppq8\" (UniqueName: \"kubernetes.io/projected/99754569-4c4f-4219-8aad-d9491e9117b5-kube-api-access-kppq8\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.987357 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.987370 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:58 crc kubenswrapper[4710]: I1009 09:22:58.987381 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99754569-4c4f-4219-8aad-d9491e9117b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.508227 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-tlfsf" event={"ID":"99754569-4c4f-4219-8aad-d9491e9117b5","Type":"ContainerDied","Data":"5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02"} Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.508563 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce0f2c5457d269ed86490a36d6f43935dd7c1c040aff179027690130e84ce02" Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.508309 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-tlfsf" Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.680252 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.680598 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-log" containerID="cri-o://76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" gracePeriod=30 Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.681113 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-api" containerID="cri-o://e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" gracePeriod=30 Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.699514 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.699691 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerName="nova-scheduler-scheduler" containerID="cri-o://c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" gracePeriod=30 Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.733030 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.733362 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" 
containerName="nova-metadata-log" containerID="cri-o://3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab" gracePeriod=30 Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.733456 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" containerID="cri-o://9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4" gracePeriod=30 Oct 09 09:22:59 crc kubenswrapper[4710]: I1009 09:22:59.794064 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.294782 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.345736 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.346969 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.350073 4710 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.350120 4710 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerName="nova-scheduler-scheduler" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425085 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425342 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425481 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvrmd\" (UniqueName: \"kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425557 4710 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425782 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.425867 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs\") pod \"9606f170-94c5-4db2-84e0-ed69c86ff944\" (UID: \"9606f170-94c5-4db2-84e0-ed69c86ff944\") " Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.426127 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs" (OuterVolumeSpecName: "logs") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.426450 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9606f170-94c5-4db2-84e0-ed69c86ff944-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.429712 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd" (OuterVolumeSpecName: "kube-api-access-xvrmd") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "kube-api-access-xvrmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.445841 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.446766 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data" (OuterVolumeSpecName: "config-data") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.463012 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.472262 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9606f170-94c5-4db2-84e0-ed69c86ff944" (UID: "9606f170-94c5-4db2-84e0-ed69c86ff944"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.519753 4710 generic.go:334] "Generic (PLEG): container finished" podID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerID="e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" exitCode=0 Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.519998 4710 generic.go:334] "Generic (PLEG): container finished" podID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerID="76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" exitCode=143 Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.519792 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.519814 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerDied","Data":"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f"} Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.520087 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerDied","Data":"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607"} Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.520098 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9606f170-94c5-4db2-84e0-ed69c86ff944","Type":"ContainerDied","Data":"432db8822bfc5be561effd6c334352b6f18e91c0adb824d921bac680a5263297"} Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.520127 4710 scope.go:117] "RemoveContainer" containerID="e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.528344 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvrmd\" (UniqueName: \"kubernetes.io/projected/9606f170-94c5-4db2-84e0-ed69c86ff944-kube-api-access-xvrmd\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.528366 4710 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.528375 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.528388 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.528396 4710 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9606f170-94c5-4db2-84e0-ed69c86ff944-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.534046 4710 generic.go:334] "Generic (PLEG): container finished" podID="36765fd5-2419-4e33-bdb9-425b7029d237" containerID="3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab" exitCode=143 Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.534155 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerDied","Data":"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab"} Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.564163 4710 scope.go:117] "RemoveContainer" containerID="76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.588578 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.591376 4710 scope.go:117] "RemoveContainer" containerID="e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.593635 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f\": container with ID starting with e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f not found: ID does not exist" containerID="e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.593789 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f"} err="failed to get container status \"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f\": rpc error: code = NotFound desc = could not find container \"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f\": container with ID starting with e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f not found: ID does not exist" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.593879 4710 scope.go:117] "RemoveContainer" containerID="76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.595755 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.595974 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607\": container with ID starting with 76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607 not found: ID does not exist" containerID="76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.596011 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607"} err="failed to get container status \"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607\": rpc error: code = NotFound desc = could not find container \"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607\": container with ID starting with 
76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607 not found: ID does not exist" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.596042 4710 scope.go:117] "RemoveContainer" containerID="e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.599969 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f"} err="failed to get container status \"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f\": rpc error: code = NotFound desc = could not find container \"e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f\": container with ID starting with e7275c64d33f39ade0934e4e2de532c899cadc4b428c2c1f92b2b6857b21289f not found: ID does not exist" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.600007 4710 scope.go:117] "RemoveContainer" containerID="76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.601481 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607"} err="failed to get container status \"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607\": rpc error: code = NotFound desc = could not find container \"76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607\": container with ID starting with 76a4703e11407ee8916e9cb0db1f09a736bd9a47f9977db621a4f3109427a607 not found: ID does not exist" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.604475 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.604905 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="dnsmasq-dns" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.604925 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="dnsmasq-dns" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.604949 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="init" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.604955 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="init" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.604976 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99754569-4c4f-4219-8aad-d9491e9117b5" containerName="nova-manage" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.604982 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="99754569-4c4f-4219-8aad-d9491e9117b5" containerName="nova-manage" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.604991 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-log" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.604996 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-log" Oct 09 09:23:00 crc kubenswrapper[4710]: E1009 09:23:00.605006 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-api" Oct 09 09:23:00 crc 
kubenswrapper[4710]: I1009 09:23:00.605011 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-api" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.605246 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5fb6ddf-08c7-4cd5-acaf-338e55a82730" containerName="dnsmasq-dns" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.605272 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-api" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.605282 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" containerName="nova-api-log" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.605288 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="99754569-4c4f-4219-8aad-d9491e9117b5" containerName="nova-manage" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.606288 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.608838 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.609140 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.609332 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.632896 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735110 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eb69de-ee17-4084-95dc-0192a6d4a0d4-logs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735178 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-public-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735304 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735347 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwb2l\" (UniqueName: \"kubernetes.io/projected/61eb69de-ee17-4084-95dc-0192a6d4a0d4-kube-api-access-hwb2l\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735367 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-config-data\") pod \"nova-api-0\" 
(UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.735384 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.824150 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9606f170-94c5-4db2-84e0-ed69c86ff944" path="/var/lib/kubelet/pods/9606f170-94c5-4db2-84e0-ed69c86ff944/volumes" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837286 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eb69de-ee17-4084-95dc-0192a6d4a0d4-logs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837324 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-public-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837397 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837424 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwb2l\" (UniqueName: \"kubernetes.io/projected/61eb69de-ee17-4084-95dc-0192a6d4a0d4-kube-api-access-hwb2l\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837469 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-config-data\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.837483 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.838066 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61eb69de-ee17-4084-95dc-0192a6d4a0d4-logs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.844279 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-public-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.844324 
4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.844495 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-config-data\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.844854 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/61eb69de-ee17-4084-95dc-0192a6d4a0d4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.852323 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwb2l\" (UniqueName: \"kubernetes.io/projected/61eb69de-ee17-4084-95dc-0192a6d4a0d4-kube-api-access-hwb2l\") pod \"nova-api-0\" (UID: \"61eb69de-ee17-4084-95dc-0192a6d4a0d4\") " pod="openstack/nova-api-0" Oct 09 09:23:00 crc kubenswrapper[4710]: I1009 09:23:00.932812 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 09 09:23:01 crc kubenswrapper[4710]: I1009 09:23:01.346670 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 09 09:23:01 crc kubenswrapper[4710]: W1009 09:23:01.348231 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61eb69de_ee17_4084_95dc_0192a6d4a0d4.slice/crio-21a52f65ee3ab2356fa89b2ec9c70a551a78c7de624b7ec3a3e4be833b9797dc WatchSource:0}: Error finding container 21a52f65ee3ab2356fa89b2ec9c70a551a78c7de624b7ec3a3e4be833b9797dc: Status 404 returned error can't find the container with id 21a52f65ee3ab2356fa89b2ec9c70a551a78c7de624b7ec3a3e4be833b9797dc Oct 09 09:23:01 crc kubenswrapper[4710]: I1009 09:23:01.571286 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"61eb69de-ee17-4084-95dc-0192a6d4a0d4","Type":"ContainerStarted","Data":"2dc129a74d186c31d766696546a12c512a0878362bf47ad4d254ebf7911e76ac"} Oct 09 09:23:01 crc kubenswrapper[4710]: I1009 09:23:01.571340 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"61eb69de-ee17-4084-95dc-0192a6d4a0d4","Type":"ContainerStarted","Data":"21a52f65ee3ab2356fa89b2ec9c70a551a78c7de624b7ec3a3e4be833b9797dc"} Oct 09 09:23:02 crc kubenswrapper[4710]: I1009 09:23:02.581479 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"61eb69de-ee17-4084-95dc-0192a6d4a0d4","Type":"ContainerStarted","Data":"193d3402c71f740624b82d67a491dbd34892ed17814711b4d2ea44579bb56d27"} Oct 09 09:23:02 crc kubenswrapper[4710]: I1009 09:23:02.603495 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.603478848 podStartE2EDuration="2.603478848s" podCreationTimestamp="2025-10-09 09:23:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:23:02.597858878 +0000 UTC m=+1106.087967274" 
watchObservedRunningTime="2025-10-09 09:23:02.603478848 +0000 UTC m=+1106.093587246" Oct 09 09:23:02 crc kubenswrapper[4710]: I1009 09:23:02.870537 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": read tcp 10.217.0.2:34228->10.217.0.179:8775: read: connection reset by peer" Oct 09 09:23:02 crc kubenswrapper[4710]: I1009 09:23:02.870553 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": read tcp 10.217.0.2:34232->10.217.0.179:8775: read: connection reset by peer" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.287325 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.389599 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs\") pod \"36765fd5-2419-4e33-bdb9-425b7029d237\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.389678 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle\") pod \"36765fd5-2419-4e33-bdb9-425b7029d237\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.389769 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data\") pod \"36765fd5-2419-4e33-bdb9-425b7029d237\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.389807 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs\") pod \"36765fd5-2419-4e33-bdb9-425b7029d237\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.389834 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzvjk\" (UniqueName: \"kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk\") pod \"36765fd5-2419-4e33-bdb9-425b7029d237\" (UID: \"36765fd5-2419-4e33-bdb9-425b7029d237\") " Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.390983 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs" (OuterVolumeSpecName: "logs") pod "36765fd5-2419-4e33-bdb9-425b7029d237" (UID: "36765fd5-2419-4e33-bdb9-425b7029d237"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.410240 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk" (OuterVolumeSpecName: "kube-api-access-dzvjk") pod "36765fd5-2419-4e33-bdb9-425b7029d237" (UID: "36765fd5-2419-4e33-bdb9-425b7029d237"). InnerVolumeSpecName "kube-api-access-dzvjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.452212 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data" (OuterVolumeSpecName: "config-data") pod "36765fd5-2419-4e33-bdb9-425b7029d237" (UID: "36765fd5-2419-4e33-bdb9-425b7029d237"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.458549 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36765fd5-2419-4e33-bdb9-425b7029d237" (UID: "36765fd5-2419-4e33-bdb9-425b7029d237"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.480017 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "36765fd5-2419-4e33-bdb9-425b7029d237" (UID: "36765fd5-2419-4e33-bdb9-425b7029d237"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.493184 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36765fd5-2419-4e33-bdb9-425b7029d237-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.493247 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.493263 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.493273 4710 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/36765fd5-2419-4e33-bdb9-425b7029d237-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.493284 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzvjk\" (UniqueName: \"kubernetes.io/projected/36765fd5-2419-4e33-bdb9-425b7029d237-kube-api-access-dzvjk\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.593398 4710 generic.go:334] "Generic (PLEG): container finished" podID="36765fd5-2419-4e33-bdb9-425b7029d237" containerID="9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4" exitCode=0 Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.593936 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.594490 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerDied","Data":"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4"} Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.594561 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"36765fd5-2419-4e33-bdb9-425b7029d237","Type":"ContainerDied","Data":"d94533e64c7773801048567f45a91ea240ea3d759730387ec82d73ab3bc4790a"} Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.594586 4710 scope.go:117] "RemoveContainer" containerID="9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.617196 4710 scope.go:117] "RemoveContainer" containerID="3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.647922 4710 scope.go:117] "RemoveContainer" containerID="9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4" Oct 09 09:23:03 crc kubenswrapper[4710]: E1009 09:23:03.649318 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4\": container with ID starting with 9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4 not found: ID does not exist" containerID="9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.649416 4710 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4"} err="failed to get container status \"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4\": rpc error: code = NotFound desc = could not find container \"9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4\": container with ID starting with 9a90fef6e48d1efdc5f61c4fc92a0fb3d81db80ef4c8c970b75fc84165a7b4f4 not found: ID does not exist" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.649525 4710 scope.go:117] "RemoveContainer" containerID="3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab" Oct 09 09:23:03 crc kubenswrapper[4710]: E1009 09:23:03.651350 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab\": container with ID starting with 3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab not found: ID does not exist" containerID="3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.651480 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab"} err="failed to get container status \"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab\": rpc error: code = NotFound desc = could not find container \"3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab\": container with ID starting with 3d87525bd90591dedcdd0672d07d20596368a007f5b1c17ef7fca61055d650ab not found: ID does not exist" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.654755 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.659841 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.664458 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:23:03 crc kubenswrapper[4710]: E1009 09:23:03.664963 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-log" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.665045 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-log" Oct 09 09:23:03 crc kubenswrapper[4710]: E1009 09:23:03.665102 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.665149 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.665405 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-metadata" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.665503 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" containerName="nova-metadata-log" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.666529 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.677211 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.677419 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.678317 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.797043 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g59k\" (UniqueName: \"kubernetes.io/projected/0a140770-37ca-4b77-8eed-bc3ecfed72db-kube-api-access-5g59k\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.797101 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.797159 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.797190 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-config-data\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.797253 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a140770-37ca-4b77-8eed-bc3ecfed72db-logs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.900821 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g59k\" (UniqueName: \"kubernetes.io/projected/0a140770-37ca-4b77-8eed-bc3ecfed72db-kube-api-access-5g59k\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.900947 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.901034 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.901088 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-config-data\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.901241 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a140770-37ca-4b77-8eed-bc3ecfed72db-logs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.901825 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a140770-37ca-4b77-8eed-bc3ecfed72db-logs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.906136 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-config-data\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.908105 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.908961 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a140770-37ca-4b77-8eed-bc3ecfed72db-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.920376 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g59k\" (UniqueName: \"kubernetes.io/projected/0a140770-37ca-4b77-8eed-bc3ecfed72db-kube-api-access-5g59k\") pod \"nova-metadata-0\" (UID: \"0a140770-37ca-4b77-8eed-bc3ecfed72db\") " pod="openstack/nova-metadata-0" Oct 09 09:23:03 crc kubenswrapper[4710]: I1009 09:23:03.997849 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.094563 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.224991 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle\") pod \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.225185 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data\") pod \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.225495 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr7z8\" (UniqueName: \"kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8\") pod \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\" (UID: \"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16\") " Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.230705 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8" (OuterVolumeSpecName: "kube-api-access-pr7z8") pod "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" (UID: "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16"). InnerVolumeSpecName "kube-api-access-pr7z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.250709 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data" (OuterVolumeSpecName: "config-data") pod "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" (UID: "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.251120 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" (UID: "a2dcd8dc-03a7-4ffb-95bd-67b1a184df16"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.328467 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.328511 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr7z8\" (UniqueName: \"kubernetes.io/projected/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-kube-api-access-pr7z8\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.328524 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:04 crc kubenswrapper[4710]: W1009 09:23:04.407107 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a140770_37ca_4b77_8eed_bc3ecfed72db.slice/crio-f84699fd0309382790f5236f6c11b2a37df78b8bd846e6ff453c77a5b51f2c17 WatchSource:0}: Error finding container f84699fd0309382790f5236f6c11b2a37df78b8bd846e6ff453c77a5b51f2c17: Status 404 returned error can't find the container with id f84699fd0309382790f5236f6c11b2a37df78b8bd846e6ff453c77a5b51f2c17 Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.408803 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.603170 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0a140770-37ca-4b77-8eed-bc3ecfed72db","Type":"ContainerStarted","Data":"3c1a006af6d8672c809e8578285a5597afe27c4ab68e992a14b80e70ae1cccf4"} Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.603564 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0a140770-37ca-4b77-8eed-bc3ecfed72db","Type":"ContainerStarted","Data":"f84699fd0309382790f5236f6c11b2a37df78b8bd846e6ff453c77a5b51f2c17"} Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.604615 4710 generic.go:334] "Generic (PLEG): container finished" podID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" exitCode=0 Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.604662 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.604681 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16","Type":"ContainerDied","Data":"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d"} Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.604776 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a2dcd8dc-03a7-4ffb-95bd-67b1a184df16","Type":"ContainerDied","Data":"aea94590a2d11d91599a22cc050fd640454332205cd8adff7fa1d2cf3f86e76e"} Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.604800 4710 scope.go:117] "RemoveContainer" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.631920 4710 scope.go:117] "RemoveContainer" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" Oct 09 09:23:04 crc kubenswrapper[4710]: E1009 09:23:04.632350 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d\": container with ID starting with c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d not found: ID does not exist" containerID="c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.632383 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d"} err="failed to get container status \"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d\": rpc error: code = NotFound desc = could not find container \"c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d\": container with ID starting with c4720832dd44256cdc4ac23949f1dfe1d0fbfe23bb84f54ef925cc2124fbfa4d not found: ID does not exist" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.653083 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.666750 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.675488 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:23:04 crc kubenswrapper[4710]: E1009 09:23:04.676083 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerName="nova-scheduler-scheduler" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.676168 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerName="nova-scheduler-scheduler" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.676469 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" containerName="nova-scheduler-scheduler" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.677248 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.681300 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.698741 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.824304 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36765fd5-2419-4e33-bdb9-425b7029d237" path="/var/lib/kubelet/pods/36765fd5-2419-4e33-bdb9-425b7029d237/volumes" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.825328 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2dcd8dc-03a7-4ffb-95bd-67b1a184df16" path="/var/lib/kubelet/pods/a2dcd8dc-03a7-4ffb-95bd-67b1a184df16/volumes" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.841712 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm4g2\" (UniqueName: \"kubernetes.io/projected/e717823b-a1aa-46c0-b1a6-be9ada2d596f-kube-api-access-rm4g2\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.841782 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.841822 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-config-data\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.943812 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm4g2\" (UniqueName: \"kubernetes.io/projected/e717823b-a1aa-46c0-b1a6-be9ada2d596f-kube-api-access-rm4g2\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.943935 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.943997 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-config-data\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.949342 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: 
I1009 09:23:04.954396 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e717823b-a1aa-46c0-b1a6-be9ada2d596f-config-data\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:04 crc kubenswrapper[4710]: I1009 09:23:04.960075 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm4g2\" (UniqueName: \"kubernetes.io/projected/e717823b-a1aa-46c0-b1a6-be9ada2d596f-kube-api-access-rm4g2\") pod \"nova-scheduler-0\" (UID: \"e717823b-a1aa-46c0-b1a6-be9ada2d596f\") " pod="openstack/nova-scheduler-0" Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.002601 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.403874 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.545921 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.546001 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.623368 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e717823b-a1aa-46c0-b1a6-be9ada2d596f","Type":"ContainerStarted","Data":"d40fd84ed3a99c53f0e1cbc080c4d9252f0ded41d496fd1c4e572315e524fa65"} Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.623416 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e717823b-a1aa-46c0-b1a6-be9ada2d596f","Type":"ContainerStarted","Data":"3ddb6b8f5c2eb438ad727051cf108be06a26cee6f34b9d8b12ac8fcc58062660"} Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.624983 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0a140770-37ca-4b77-8eed-bc3ecfed72db","Type":"ContainerStarted","Data":"d87b48585a369480601a389077965e4aa8655c2ae6185d1a304093dc5d6c05d0"} Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.643063 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.6430478229999999 podStartE2EDuration="1.643047823s" podCreationTimestamp="2025-10-09 09:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:23:05.63917135 +0000 UTC m=+1109.129279747" watchObservedRunningTime="2025-10-09 09:23:05.643047823 +0000 UTC m=+1109.133156220" Oct 09 09:23:05 crc kubenswrapper[4710]: I1009 09:23:05.663339 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.663331072 podStartE2EDuration="2.663331072s" podCreationTimestamp="2025-10-09 09:23:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:23:05.657999214 +0000 UTC m=+1109.148107611" watchObservedRunningTime="2025-10-09 09:23:05.663331072 +0000 UTC m=+1109.153439469" Oct 09 09:23:08 crc kubenswrapper[4710]: I1009 09:23:08.998649 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 09:23:08 crc kubenswrapper[4710]: I1009 09:23:08.999242 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 09 09:23:10 crc kubenswrapper[4710]: I1009 09:23:10.002891 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 09 09:23:10 crc kubenswrapper[4710]: I1009 09:23:10.933441 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:23:10 crc kubenswrapper[4710]: I1009 09:23:10.933983 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 09 09:23:11 crc kubenswrapper[4710]: I1009 09:23:11.946620 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="61eb69de-ee17-4084-95dc-0192a6d4a0d4" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 09:23:11 crc kubenswrapper[4710]: I1009 09:23:11.946622 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="61eb69de-ee17-4084-95dc-0192a6d4a0d4" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 09:23:13 crc kubenswrapper[4710]: I1009 09:23:13.998750 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 09:23:13 crc kubenswrapper[4710]: I1009 09:23:13.999085 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 09 09:23:15 crc kubenswrapper[4710]: I1009 09:23:15.003328 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 09 09:23:15 crc kubenswrapper[4710]: I1009 09:23:15.013593 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0a140770-37ca-4b77-8eed-bc3ecfed72db" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 09:23:15 crc kubenswrapper[4710]: I1009 09:23:15.013594 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="0a140770-37ca-4b77-8eed-bc3ecfed72db" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 09 09:23:15 crc kubenswrapper[4710]: I1009 09:23:15.027416 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 09 09:23:15 crc kubenswrapper[4710]: I1009 09:23:15.750525 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 09 09:23:20 crc kubenswrapper[4710]: I1009 09:23:20.859624 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 
09 09:23:20 crc kubenswrapper[4710]: I1009 09:23:20.939547 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 09:23:20 crc kubenswrapper[4710]: I1009 09:23:20.939987 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 09:23:20 crc kubenswrapper[4710]: I1009 09:23:20.940118 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 09 09:23:20 crc kubenswrapper[4710]: I1009 09:23:20.944884 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 09:23:21 crc kubenswrapper[4710]: I1009 09:23:21.774336 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 09 09:23:21 crc kubenswrapper[4710]: I1009 09:23:21.782601 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 09 09:23:24 crc kubenswrapper[4710]: I1009 09:23:24.002816 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 09:23:24 crc kubenswrapper[4710]: I1009 09:23:24.004142 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 09 09:23:24 crc kubenswrapper[4710]: I1009 09:23:24.008146 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 09:23:24 crc kubenswrapper[4710]: I1009 09:23:24.801198 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 09 09:23:30 crc kubenswrapper[4710]: I1009 09:23:30.965095 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:32 crc kubenswrapper[4710]: I1009 09:23:32.115467 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:34 crc kubenswrapper[4710]: I1009 09:23:34.736866 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="rabbitmq" containerID="cri-o://7e907b39a89c5b048812250c378e5e35adcd718098aa2955bc803d381e27613d" gracePeriod=604797 Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.295640 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.546091 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.546151 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.546192 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.546955 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.547011 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a" gracePeriod=600 Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.881738 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a" exitCode=0 Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.881937 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a"} Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.882869 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12"} Oct 09 09:23:35 crc kubenswrapper[4710]: I1009 09:23:35.883207 4710 scope.go:117] "RemoveContainer" containerID="8f8033ef1c1546ba9de192c838c8a65cbd27bdb87c7505416703609919040c7b" Oct 09 09:23:36 crc kubenswrapper[4710]: I1009 09:23:36.050396 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="rabbitmq" containerID="cri-o://a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c" gracePeriod=604797 Oct 09 09:23:40 crc kubenswrapper[4710]: I1009 09:23:40.930482 4710 generic.go:334] "Generic (PLEG): container finished" podID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerID="7e907b39a89c5b048812250c378e5e35adcd718098aa2955bc803d381e27613d" exitCode=0 Oct 09 09:23:40 crc kubenswrapper[4710]: I1009 09:23:40.930603 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerDied","Data":"7e907b39a89c5b048812250c378e5e35adcd718098aa2955bc803d381e27613d"} Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.137542 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.240835 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.240897 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.240924 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.240947 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241009 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7q7c\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241045 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241067 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241144 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241188 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241258 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: 
\"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.241316 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie\") pod \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\" (UID: \"b54a6a74-e8d4-4e48-b7dc-7805027dce53\") " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.242099 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.242596 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.242821 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.252469 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info" (OuterVolumeSpecName: "pod-info") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.252752 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c" (OuterVolumeSpecName: "kube-api-access-r7q7c") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "kube-api-access-r7q7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.252907 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.254652 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.255546 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.285722 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data" (OuterVolumeSpecName: "config-data") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.313148 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf" (OuterVolumeSpecName: "server-conf") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.352957 4710 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.352990 4710 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b54a6a74-e8d4-4e48-b7dc-7805027dce53-pod-info\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353001 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353011 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353019 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353027 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353037 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7q7c\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-kube-api-access-r7q7c\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353044 4710 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b54a6a74-e8d4-4e48-b7dc-7805027dce53-server-conf\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc 
kubenswrapper[4710]: I1009 09:23:41.353052 4710 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b54a6a74-e8d4-4e48-b7dc-7805027dce53-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.353080 4710 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.355897 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b54a6a74-e8d4-4e48-b7dc-7805027dce53" (UID: "b54a6a74-e8d4-4e48-b7dc-7805027dce53"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.374250 4710 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.455159 4710 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.455188 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b54a6a74-e8d4-4e48-b7dc-7805027dce53-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.942462 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b54a6a74-e8d4-4e48-b7dc-7805027dce53","Type":"ContainerDied","Data":"b7c42f8f7ebb4835cdebee452c000190ddb250946e11a4bd5a49a3eeed5d5780"} Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.942542 4710 scope.go:117] "RemoveContainer" containerID="7e907b39a89c5b048812250c378e5e35adcd718098aa2955bc803d381e27613d" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.943280 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.967216 4710 scope.go:117] "RemoveContainer" containerID="2f54f5b6b2db40672622d4587eeace11474707658f13f2878be4600122b0ee55" Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.985461 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:41 crc kubenswrapper[4710]: I1009 09:23:41.998321 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.029656 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:42 crc kubenswrapper[4710]: E1009 09:23:42.030167 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="setup-container" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.030192 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="setup-container" Oct 09 09:23:42 crc kubenswrapper[4710]: E1009 09:23:42.030232 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="rabbitmq" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.030239 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="rabbitmq" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.030506 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" containerName="rabbitmq" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.031649 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.035615 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.035783 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.036656 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.036758 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-7c4c4" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.036760 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.036716 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.036671 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.050173 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168441 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168689 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168715 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z4j5\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-kube-api-access-7z4j5\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168747 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168763 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168813 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/818b872a-e3f5-475f-ac6d-99810ac2f39b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.168965 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/818b872a-e3f5-475f-ac6d-99810ac2f39b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.169030 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.169069 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.169125 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-config-data\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.169338 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.271769 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.271862 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.271895 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z4j5\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-kube-api-access-7z4j5\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.271938 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " 
pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.271958 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272018 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/818b872a-e3f5-475f-ac6d-99810ac2f39b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272148 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/818b872a-e3f5-475f-ac6d-99810ac2f39b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272238 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272291 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272381 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-config-data\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272444 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.272948 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.274450 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.274768 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.275737 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.275793 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.276041 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/818b872a-e3f5-475f-ac6d-99810ac2f39b-config-data\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.278371 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.286760 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.290575 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/818b872a-e3f5-475f-ac6d-99810ac2f39b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.296089 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/818b872a-e3f5-475f-ac6d-99810ac2f39b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.298975 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z4j5\" (UniqueName: \"kubernetes.io/projected/818b872a-e3f5-475f-ac6d-99810ac2f39b-kube-api-access-7z4j5\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.338267 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-server-0\" (UID: \"818b872a-e3f5-475f-ac6d-99810ac2f39b\") " pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.352289 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.395665 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.576703 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.576753 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.576796 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.576850 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.576871 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577021 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577110 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577232 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577251 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9drnb\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577341 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.577385 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf\") pod \"66a04a07-2cc5-4549-9217-d5fbb82a6755\" (UID: \"66a04a07-2cc5-4549-9217-d5fbb82a6755\") " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.593170 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.593625 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.597028 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.599150 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.607155 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.608752 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.610371 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb" (OuterVolumeSpecName: "kube-api-access-9drnb") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "kube-api-access-9drnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.617709 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data" (OuterVolumeSpecName: "config-data") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.617783 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info" (OuterVolumeSpecName: "pod-info") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.658505 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.674595 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf" (OuterVolumeSpecName: "server-conf") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.680636 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.680777 4710 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-server-conf\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.680854 4710 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.680909 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.680963 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.681021 4710 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/66a04a07-2cc5-4549-9217-d5fbb82a6755-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.681076 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.681135 4710 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/66a04a07-2cc5-4549-9217-d5fbb82a6755-pod-info\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.681186 4710 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/66a04a07-2cc5-4549-9217-d5fbb82a6755-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.681234 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9drnb\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-kube-api-access-9drnb\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.708032 4710 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.716816 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "66a04a07-2cc5-4549-9217-d5fbb82a6755" (UID: "66a04a07-2cc5-4549-9217-d5fbb82a6755"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.783678 4710 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.783874 4710 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/66a04a07-2cc5-4549-9217-d5fbb82a6755-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.845484 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b54a6a74-e8d4-4e48-b7dc-7805027dce53" path="/var/lib/kubelet/pods/b54a6a74-e8d4-4e48-b7dc-7805027dce53/volumes" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.955149 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"818b872a-e3f5-475f-ac6d-99810ac2f39b","Type":"ContainerStarted","Data":"90bcfa8265539cb89689f48869f73cc6dcc7910a69917f8f47039843cd4b7505"} Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.956783 4710 generic.go:334] "Generic (PLEG): container finished" podID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerID="a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c" exitCode=0 Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.956835 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerDied","Data":"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c"} Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.956856 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"66a04a07-2cc5-4549-9217-d5fbb82a6755","Type":"ContainerDied","Data":"ce2bf4562cffe13ab87edabf98f76f15367dbb304554de50a85be3ec511ca4ac"} Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.956876 4710 scope.go:117] "RemoveContainer" containerID="a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.957029 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:42 crc kubenswrapper[4710]: I1009 09:23:42.995250 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.002075 4710 scope.go:117] "RemoveContainer" containerID="af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.011534 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.017581 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:43 crc kubenswrapper[4710]: E1009 09:23:43.018018 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="rabbitmq" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.018037 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="rabbitmq" Oct 09 09:23:43 crc kubenswrapper[4710]: E1009 09:23:43.018059 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="setup-container" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.018065 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="setup-container" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.018224 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" containerName="rabbitmq" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.019112 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023164 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023258 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023346 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023418 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023670 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.023755 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-rcfnn" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.024005 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.030373 4710 scope.go:117] "RemoveContainer" containerID="a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c" Oct 09 09:23:43 crc kubenswrapper[4710]: E1009 09:23:43.032767 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c\": container with ID starting with a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c not found: ID does not exist" containerID="a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.032807 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c"} err="failed to get container status \"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c\": rpc error: code = NotFound desc = could not find container \"a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c\": container with ID starting with a286398dc39e1bfee3caebb8615215907b9a58666084030912c01e9664e1d84c not found: ID does not exist" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.032828 4710 scope.go:117] "RemoveContainer" containerID="af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906" Oct 09 09:23:43 crc kubenswrapper[4710]: E1009 09:23:43.035967 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906\": container with ID starting with af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906 not found: ID does not exist" containerID="af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.035996 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906"} err="failed to get container status \"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906\": rpc error: code = NotFound desc = could not find 
container \"af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906\": container with ID starting with af95ef995d1637c54da57d37bb05910a94919fa0b7fec16b0dc08edb0af96906 not found: ID does not exist" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.043555 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.191874 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.191958 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3011d32f-6110-456d-a247-f6298b1d46e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.191990 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192023 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192056 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192134 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192157 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192175 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg4j7\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-kube-api-access-jg4j7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 
09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192341 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3011d32f-6110-456d-a247-f6298b1d46e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192393 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.192472 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294513 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294591 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294611 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294639 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg4j7\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-kube-api-access-jg4j7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294712 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3011d32f-6110-456d-a247-f6298b1d46e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294731 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294758 
4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294809 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294846 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3011d32f-6110-456d-a247-f6298b1d46e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294866 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.294896 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.295261 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.295899 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.296115 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.296976 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3011d32f-6110-456d-a247-f6298b1d46e3-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.297276 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.297646 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.311961 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg4j7\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-kube-api-access-jg4j7\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.383421 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3011d32f-6110-456d-a247-f6298b1d46e3-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.383906 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3011d32f-6110-456d-a247-f6298b1d46e3-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.384636 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.385390 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3011d32f-6110-456d-a247-f6298b1d46e3-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.504093 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"3011d32f-6110-456d-a247-f6298b1d46e3\") " pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.671037 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.878636 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.880501 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.886019 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.902662 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:23:43 crc kubenswrapper[4710]: I1009 09:23:43.967403 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"818b872a-e3f5-475f-ac6d-99810ac2f39b","Type":"ContainerStarted","Data":"df8f014f00108c3c5963e1323801f883146e76d01074603074e0fe8104fabd9f"} Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.007800 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.007914 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.008019 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spq48\" (UniqueName: \"kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.008106 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.008213 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.008260 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.064392 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.116668 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.116775 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.116805 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.116850 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.116960 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.117102 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spq48\" (UniqueName: \"kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.118117 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.119701 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.119949 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.121022 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" 
(UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.121108 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.132245 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spq48\" (UniqueName: \"kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48\") pod \"dnsmasq-dns-64fb5d8fd7-cw48d\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.212734 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.628492 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.824769 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66a04a07-2cc5-4549-9217-d5fbb82a6755" path="/var/lib/kubelet/pods/66a04a07-2cc5-4549-9217-d5fbb82a6755/volumes" Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.979741 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" event={"ID":"10c76909-860a-4b2b-977e-6fcdc2089de0","Type":"ContainerStarted","Data":"8ff2c0476b4470d2d459a88dc8a40e2b16d43119cd0758c555e8e2fadaead7d9"} Oct 09 09:23:44 crc kubenswrapper[4710]: I1009 09:23:44.981865 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3011d32f-6110-456d-a247-f6298b1d46e3","Type":"ContainerStarted","Data":"771cb2745583ff21a25d042651e4f0c1197103883788676770e88526fe93c5e5"} Oct 09 09:23:45 crc kubenswrapper[4710]: I1009 09:23:45.990817 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3011d32f-6110-456d-a247-f6298b1d46e3","Type":"ContainerStarted","Data":"db6c9cfcdedea7743fb53e343d2c330d74408bb509be72aa400bf70d369e8198"} Oct 09 09:23:45 crc kubenswrapper[4710]: I1009 09:23:45.992717 4710 generic.go:334] "Generic (PLEG): container finished" podID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerID="325e758f33ebdc9316229e6077f215f59497bb12d5c519e3ac59510e2824a5fe" exitCode=0 Oct 09 09:23:45 crc kubenswrapper[4710]: I1009 09:23:45.992766 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" event={"ID":"10c76909-860a-4b2b-977e-6fcdc2089de0","Type":"ContainerDied","Data":"325e758f33ebdc9316229e6077f215f59497bb12d5c519e3ac59510e2824a5fe"} Oct 09 09:23:47 crc kubenswrapper[4710]: I1009 09:23:47.004139 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" event={"ID":"10c76909-860a-4b2b-977e-6fcdc2089de0","Type":"ContainerStarted","Data":"db9c8fe846fc4af9ca0d534c008950318e956f9fd02d954ee17a2d1fdaa92af5"} Oct 09 09:23:47 crc kubenswrapper[4710]: I1009 09:23:47.025069 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" podStartSLOduration=4.025049037 podStartE2EDuration="4.025049037s" 
podCreationTimestamp="2025-10-09 09:23:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:23:47.017000717 +0000 UTC m=+1150.507109104" watchObservedRunningTime="2025-10-09 09:23:47.025049037 +0000 UTC m=+1150.515157434" Oct 09 09:23:48 crc kubenswrapper[4710]: I1009 09:23:48.012297 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.214424 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.283062 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.283366 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-665946c669-kdpds" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="dnsmasq-dns" containerID="cri-o://e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3" gracePeriod=10 Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.422781 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.424620 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.437448 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526059 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526096 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526133 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526157 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7qng\" (UniqueName: \"kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526300 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.526821 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.630576 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.630635 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.630714 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.630750 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7qng\" (UniqueName: \"kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.630814 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.631880 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.631967 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.632038 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.632648 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.632770 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.644886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.669260 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7qng\" (UniqueName: \"kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng\") pod \"dnsmasq-dns-867c8fd5c5-lmpr2\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.756508 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.764819 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.942829 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.942976 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.943066 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c979g\" (UniqueName: \"kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.943277 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.943313 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.964967 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g" (OuterVolumeSpecName: "kube-api-access-c979g") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "kube-api-access-c979g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:23:54 crc kubenswrapper[4710]: I1009 09:23:54.998182 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.030109 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.046028 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.046198 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") pod \"b304fcec-3561-4932-b04d-5e04c64fbc7c\" (UID: \"b304fcec-3561-4932-b04d-5e04c64fbc7c\") " Oct 09 09:23:55 crc kubenswrapper[4710]: W1009 09:23:55.046530 4710 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/b304fcec-3561-4932-b04d-5e04c64fbc7c/volumes/kubernetes.io~configmap/ovsdbserver-nb Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.046555 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.047004 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.047078 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.047132 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.047181 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c979g\" (UniqueName: \"kubernetes.io/projected/b304fcec-3561-4932-b04d-5e04c64fbc7c-kube-api-access-c979g\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.053084 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config" (OuterVolumeSpecName: "config") pod "b304fcec-3561-4932-b04d-5e04c64fbc7c" (UID: "b304fcec-3561-4932-b04d-5e04c64fbc7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.073650 4710 generic.go:334] "Generic (PLEG): container finished" podID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerID="e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3" exitCode=0 Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.073699 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665946c669-kdpds" event={"ID":"b304fcec-3561-4932-b04d-5e04c64fbc7c","Type":"ContainerDied","Data":"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3"} Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.073708 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-665946c669-kdpds" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.073738 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665946c669-kdpds" event={"ID":"b304fcec-3561-4932-b04d-5e04c64fbc7c","Type":"ContainerDied","Data":"f43017271c562b71bd62fba85ba9a4801208f131081d39e4a6f537ea3de5892c"} Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.073756 4710 scope.go:117] "RemoveContainer" containerID="e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.103184 4710 scope.go:117] "RemoveContainer" containerID="6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.140917 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.143130 4710 scope.go:117] "RemoveContainer" containerID="e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3" Oct 09 09:23:55 crc kubenswrapper[4710]: E1009 09:23:55.143618 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3\": container with ID starting with e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3 not found: ID does not exist" containerID="e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.143653 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3"} err="failed to get container status \"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3\": rpc error: code = NotFound desc = could not find container \"e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3\": container with ID starting with e0901dc3857634576d99c8f2fb5655ec8de54a3d3d126bcb6701cca76c79fac3 not found: ID does not exist" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.143680 4710 scope.go:117] "RemoveContainer" containerID="6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f" Oct 09 09:23:55 crc kubenswrapper[4710]: E1009 09:23:55.144098 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f\": container with ID starting with 6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f not found: ID does not exist" containerID="6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.144139 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f"} err="failed to get container status \"6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f\": rpc error: code = NotFound desc = could not find container \"6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f\": container with ID starting with 6db41d3d0d76b4c02de77e191aa70f1797aea956037f2f96f40bdb8e7cfd0a5f not found: ID does not exist" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.149918 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/b304fcec-3561-4932-b04d-5e04c64fbc7c-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.153757 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-665946c669-kdpds"] Oct 09 09:23:55 crc kubenswrapper[4710]: I1009 09:23:55.230409 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:23:55 crc kubenswrapper[4710]: W1009 09:23:55.237358 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8485f386_dd23_4d8c_89de_dcb805d8d745.slice/crio-4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55 WatchSource:0}: Error finding container 4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55: Status 404 returned error can't find the container with id 4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55 Oct 09 09:23:56 crc kubenswrapper[4710]: I1009 09:23:56.088845 4710 generic.go:334] "Generic (PLEG): container finished" podID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerID="8ee7a0d86d9a6017b8ebd6c9f0e7b7c0a7869a4169cb812a52ea78a01e8d964e" exitCode=0 Oct 09 09:23:56 crc kubenswrapper[4710]: I1009 09:23:56.089031 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" event={"ID":"8485f386-dd23-4d8c-89de-dcb805d8d745","Type":"ContainerDied","Data":"8ee7a0d86d9a6017b8ebd6c9f0e7b7c0a7869a4169cb812a52ea78a01e8d964e"} Oct 09 09:23:56 crc kubenswrapper[4710]: I1009 09:23:56.089516 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" event={"ID":"8485f386-dd23-4d8c-89de-dcb805d8d745","Type":"ContainerStarted","Data":"4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55"} Oct 09 09:23:56 crc kubenswrapper[4710]: I1009 09:23:56.838491 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" path="/var/lib/kubelet/pods/b304fcec-3561-4932-b04d-5e04c64fbc7c/volumes" Oct 09 09:23:57 crc kubenswrapper[4710]: I1009 09:23:57.102781 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" event={"ID":"8485f386-dd23-4d8c-89de-dcb805d8d745","Type":"ContainerStarted","Data":"71b324a27cf5c64b9578d498d815a5d83e42a31679941e92d217a2fa71181e6c"} Oct 09 09:23:57 crc kubenswrapper[4710]: I1009 09:23:57.128403 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" podStartSLOduration=3.128388332 podStartE2EDuration="3.128388332s" podCreationTimestamp="2025-10-09 09:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:23:57.118339889 +0000 UTC m=+1160.608448286" watchObservedRunningTime="2025-10-09 09:23:57.128388332 +0000 UTC m=+1160.618496729" Oct 09 09:23:58 crc kubenswrapper[4710]: I1009 09:23:58.112184 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:24:04 crc kubenswrapper[4710]: I1009 09:24:04.766599 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:24:04 crc kubenswrapper[4710]: I1009 09:24:04.829862 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:24:04 crc 
kubenswrapper[4710]: I1009 09:24:04.830042 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="dnsmasq-dns" containerID="cri-o://db9c8fe846fc4af9ca0d534c008950318e956f9fd02d954ee17a2d1fdaa92af5" gracePeriod=10 Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.179054 4710 generic.go:334] "Generic (PLEG): container finished" podID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerID="db9c8fe846fc4af9ca0d534c008950318e956f9fd02d954ee17a2d1fdaa92af5" exitCode=0 Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.179105 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" event={"ID":"10c76909-860a-4b2b-977e-6fcdc2089de0","Type":"ContainerDied","Data":"db9c8fe846fc4af9ca0d534c008950318e956f9fd02d954ee17a2d1fdaa92af5"} Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.259655 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.346653 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spq48\" (UniqueName: \"kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.346758 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.346841 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.346902 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.346973 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.347137 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb\") pod \"10c76909-860a-4b2b-977e-6fcdc2089de0\" (UID: \"10c76909-860a-4b2b-977e-6fcdc2089de0\") " Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.363627 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48" (OuterVolumeSpecName: "kube-api-access-spq48") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: 
"10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "kube-api-access-spq48". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.393488 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config" (OuterVolumeSpecName: "config") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: "10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.393488 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: "10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.394092 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: "10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.394139 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: "10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.394873 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "10c76909-860a-4b2b-977e-6fcdc2089de0" (UID: "10c76909-860a-4b2b-977e-6fcdc2089de0"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.449978 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.450007 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.450018 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.450030 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spq48\" (UniqueName: \"kubernetes.io/projected/10c76909-860a-4b2b-977e-6fcdc2089de0-kube-api-access-spq48\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.450042 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:05 crc kubenswrapper[4710]: I1009 09:24:05.450051 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10c76909-860a-4b2b-977e-6fcdc2089de0-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.192856 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" event={"ID":"10c76909-860a-4b2b-977e-6fcdc2089de0","Type":"ContainerDied","Data":"8ff2c0476b4470d2d459a88dc8a40e2b16d43119cd0758c555e8e2fadaead7d9"} Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.193277 4710 scope.go:117] "RemoveContainer" containerID="db9c8fe846fc4af9ca0d534c008950318e956f9fd02d954ee17a2d1fdaa92af5" Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.192953 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-64fb5d8fd7-cw48d" Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.231223 4710 scope.go:117] "RemoveContainer" containerID="325e758f33ebdc9316229e6077f215f59497bb12d5c519e3ac59510e2824a5fe" Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.241030 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.248716 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-64fb5d8fd7-cw48d"] Oct 09 09:24:06 crc kubenswrapper[4710]: I1009 09:24:06.824937 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" path="/var/lib/kubelet/pods/10c76909-860a-4b2b-977e-6fcdc2089de0/volumes" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.852229 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl"] Oct 09 09:24:14 crc kubenswrapper[4710]: E1009 09:24:14.852989 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="init" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853003 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="init" Oct 09 09:24:14 crc kubenswrapper[4710]: E1009 09:24:14.853045 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853051 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: E1009 09:24:14.853061 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="init" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853066 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="init" Oct 09 09:24:14 crc kubenswrapper[4710]: E1009 09:24:14.853075 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853080 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853274 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c76909-860a-4b2b-977e-6fcdc2089de0" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.853284 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b304fcec-3561-4932-b04d-5e04c64fbc7c" containerName="dnsmasq-dns" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.854096 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.860512 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.860535 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.860513 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.861039 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.872146 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl"] Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.920155 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.920200 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.920249 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:14 crc kubenswrapper[4710]: I1009 09:24:14.920305 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg27m\" (UniqueName: \"kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.021752 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg27m\" (UniqueName: \"kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.021864 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.021893 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.021930 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.027365 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.027389 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.027970 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.035976 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg27m\" (UniqueName: \"kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.175784 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.640544 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl"] Oct 09 09:24:15 crc kubenswrapper[4710]: I1009 09:24:15.658902 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:24:16 crc kubenswrapper[4710]: I1009 09:24:16.275734 4710 generic.go:334] "Generic (PLEG): container finished" podID="818b872a-e3f5-475f-ac6d-99810ac2f39b" containerID="df8f014f00108c3c5963e1323801f883146e76d01074603074e0fe8104fabd9f" exitCode=0 Oct 09 09:24:16 crc kubenswrapper[4710]: I1009 09:24:16.275831 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"818b872a-e3f5-475f-ac6d-99810ac2f39b","Type":"ContainerDied","Data":"df8f014f00108c3c5963e1323801f883146e76d01074603074e0fe8104fabd9f"} Oct 09 09:24:16 crc kubenswrapper[4710]: I1009 09:24:16.278011 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" event={"ID":"1fbed35b-0bc2-4811-bb62-64d295aad22f","Type":"ContainerStarted","Data":"d00e4c4d242b9b67ee16feba840d9bc4bf9b5ed24a36cd4fafa4d70a4af0566c"} Oct 09 09:24:17 crc kubenswrapper[4710]: I1009 09:24:17.294767 4710 generic.go:334] "Generic (PLEG): container finished" podID="3011d32f-6110-456d-a247-f6298b1d46e3" containerID="db6c9cfcdedea7743fb53e343d2c330d74408bb509be72aa400bf70d369e8198" exitCode=0 Oct 09 09:24:17 crc kubenswrapper[4710]: I1009 09:24:17.294794 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3011d32f-6110-456d-a247-f6298b1d46e3","Type":"ContainerDied","Data":"db6c9cfcdedea7743fb53e343d2c330d74408bb509be72aa400bf70d369e8198"} Oct 09 09:24:17 crc kubenswrapper[4710]: I1009 09:24:17.300836 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"818b872a-e3f5-475f-ac6d-99810ac2f39b","Type":"ContainerStarted","Data":"7788315e9e46fc3165874db78b48df77766788225c66e9eb3fd1163836e5e5e0"} Oct 09 09:24:17 crc kubenswrapper[4710]: I1009 09:24:17.301400 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 09 09:24:17 crc kubenswrapper[4710]: I1009 09:24:17.347175 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.347155429 podStartE2EDuration="36.347155429s" podCreationTimestamp="2025-10-09 09:23:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:24:17.345127925 +0000 UTC m=+1180.835236322" watchObservedRunningTime="2025-10-09 09:24:17.347155429 +0000 UTC m=+1180.837263827" Oct 09 09:24:18 crc kubenswrapper[4710]: I1009 09:24:18.315611 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"3011d32f-6110-456d-a247-f6298b1d46e3","Type":"ContainerStarted","Data":"707743c9df804c7f5d200a8f07bbad4e795e9f97aa3e26aac317ed87a0ee1209"} Oct 09 09:24:18 crc kubenswrapper[4710]: I1009 09:24:18.315921 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:24:18 crc kubenswrapper[4710]: I1009 09:24:18.356872 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.356844672 podStartE2EDuration="36.356844672s" podCreationTimestamp="2025-10-09 09:23:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:24:18.34702031 +0000 UTC m=+1181.837128708" watchObservedRunningTime="2025-10-09 09:24:18.356844672 +0000 UTC m=+1181.846953059" Oct 09 09:24:25 crc kubenswrapper[4710]: I1009 09:24:25.389121 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" event={"ID":"1fbed35b-0bc2-4811-bb62-64d295aad22f","Type":"ContainerStarted","Data":"ddf0fb04012808f75eaffae1f03e60c0231b9dd7fde25bf11294b92cdc4b5184"} Oct 09 09:24:25 crc kubenswrapper[4710]: I1009 09:24:25.408318 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" podStartSLOduration=2.122972044 podStartE2EDuration="11.408301986s" podCreationTimestamp="2025-10-09 09:24:14 +0000 UTC" firstStartedPulling="2025-10-09 09:24:15.658668742 +0000 UTC m=+1179.148777139" lastFinishedPulling="2025-10-09 09:24:24.943998685 +0000 UTC m=+1188.434107081" observedRunningTime="2025-10-09 09:24:25.403801566 +0000 UTC m=+1188.893909963" watchObservedRunningTime="2025-10-09 09:24:25.408301986 +0000 UTC m=+1188.898410383" Oct 09 09:24:32 crc kubenswrapper[4710]: I1009 09:24:32.356752 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 09 09:24:33 crc kubenswrapper[4710]: I1009 09:24:33.673688 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 09 09:24:36 crc kubenswrapper[4710]: I1009 09:24:36.467930 4710 generic.go:334] "Generic (PLEG): container finished" podID="1fbed35b-0bc2-4811-bb62-64d295aad22f" containerID="ddf0fb04012808f75eaffae1f03e60c0231b9dd7fde25bf11294b92cdc4b5184" exitCode=0 Oct 09 09:24:36 crc kubenswrapper[4710]: I1009 09:24:36.468007 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" event={"ID":"1fbed35b-0bc2-4811-bb62-64d295aad22f","Type":"ContainerDied","Data":"ddf0fb04012808f75eaffae1f03e60c0231b9dd7fde25bf11294b92cdc4b5184"} Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.795885 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.913838 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key\") pod \"1fbed35b-0bc2-4811-bb62-64d295aad22f\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.913897 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle\") pod \"1fbed35b-0bc2-4811-bb62-64d295aad22f\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.914075 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory\") pod \"1fbed35b-0bc2-4811-bb62-64d295aad22f\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.914095 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg27m\" (UniqueName: \"kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m\") pod \"1fbed35b-0bc2-4811-bb62-64d295aad22f\" (UID: \"1fbed35b-0bc2-4811-bb62-64d295aad22f\") " Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.920195 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1fbed35b-0bc2-4811-bb62-64d295aad22f" (UID: "1fbed35b-0bc2-4811-bb62-64d295aad22f"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.920614 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m" (OuterVolumeSpecName: "kube-api-access-dg27m") pod "1fbed35b-0bc2-4811-bb62-64d295aad22f" (UID: "1fbed35b-0bc2-4811-bb62-64d295aad22f"). InnerVolumeSpecName "kube-api-access-dg27m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.934793 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1fbed35b-0bc2-4811-bb62-64d295aad22f" (UID: "1fbed35b-0bc2-4811-bb62-64d295aad22f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:24:37 crc kubenswrapper[4710]: I1009 09:24:37.935343 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory" (OuterVolumeSpecName: "inventory") pod "1fbed35b-0bc2-4811-bb62-64d295aad22f" (UID: "1fbed35b-0bc2-4811-bb62-64d295aad22f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.016499 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.016528 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg27m\" (UniqueName: \"kubernetes.io/projected/1fbed35b-0bc2-4811-bb62-64d295aad22f-kube-api-access-dg27m\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.016540 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.016549 4710 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fbed35b-0bc2-4811-bb62-64d295aad22f-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.483393 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" event={"ID":"1fbed35b-0bc2-4811-bb62-64d295aad22f","Type":"ContainerDied","Data":"d00e4c4d242b9b67ee16feba840d9bc4bf9b5ed24a36cd4fafa4d70a4af0566c"} Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.483448 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d00e4c4d242b9b67ee16feba840d9bc4bf9b5ed24a36cd4fafa4d70a4af0566c" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.483499 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.543181 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb"] Oct 09 09:24:38 crc kubenswrapper[4710]: E1009 09:24:38.543510 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fbed35b-0bc2-4811-bb62-64d295aad22f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.543525 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fbed35b-0bc2-4811-bb62-64d295aad22f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.543770 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fbed35b-0bc2-4811-bb62-64d295aad22f" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.544385 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.552571 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb"] Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.554085 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.554288 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.554524 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.555722 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.729822 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.729877 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm5jl\" (UniqueName: \"kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.729911 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.730055 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.831137 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.832055 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.832201 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.832238 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm5jl\" (UniqueName: \"kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.835321 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.836077 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.836248 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.845865 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm5jl\" (UniqueName: \"kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:38 crc kubenswrapper[4710]: I1009 09:24:38.857700 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:24:39 crc kubenswrapper[4710]: I1009 09:24:39.277557 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb"] Oct 09 09:24:39 crc kubenswrapper[4710]: I1009 09:24:39.493051 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" event={"ID":"d476761d-c8c2-4c6a-9c2f-31ab12f8a403","Type":"ContainerStarted","Data":"be1d729339147050a4d1f9b72e5e0e3091f5e23ba0ea6cbc8601534c9b78d595"} Oct 09 09:24:40 crc kubenswrapper[4710]: I1009 09:24:40.502879 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" event={"ID":"d476761d-c8c2-4c6a-9c2f-31ab12f8a403","Type":"ContainerStarted","Data":"d403c8eeba60a023a48e371857ab63ec49fb33a231d30911942ea4b96b45025b"} Oct 09 09:24:40 crc kubenswrapper[4710]: I1009 09:24:40.520736 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" podStartSLOduration=1.9329939710000001 podStartE2EDuration="2.520721357s" podCreationTimestamp="2025-10-09 09:24:38 +0000 UTC" firstStartedPulling="2025-10-09 09:24:39.283009944 +0000 UTC m=+1202.773118340" lastFinishedPulling="2025-10-09 09:24:39.87073733 +0000 UTC m=+1203.360845726" observedRunningTime="2025-10-09 09:24:40.518250476 +0000 UTC m=+1204.008358873" watchObservedRunningTime="2025-10-09 09:24:40.520721357 +0000 UTC m=+1204.010829755" Oct 09 09:25:35 crc kubenswrapper[4710]: I1009 09:25:35.546168 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:25:35 crc kubenswrapper[4710]: I1009 09:25:35.546765 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:25:46 crc kubenswrapper[4710]: I1009 09:25:46.292468 4710 scope.go:117] "RemoveContainer" containerID="995b1658d5302a0ac1efc1491ddf0fa156bbe9c56cef16ae0e85c8ee3f8e81c5" Oct 09 09:26:05 crc kubenswrapper[4710]: I1009 09:26:05.545897 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:26:05 crc kubenswrapper[4710]: I1009 09:26:05.546359 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:26:35 crc kubenswrapper[4710]: I1009 09:26:35.546223 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:26:35 crc kubenswrapper[4710]: I1009 09:26:35.546803 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:26:35 crc kubenswrapper[4710]: I1009 09:26:35.546842 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:26:35 crc kubenswrapper[4710]: I1009 09:26:35.547833 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:26:35 crc kubenswrapper[4710]: I1009 09:26:35.547892 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12" gracePeriod=600 Oct 09 09:26:36 crc kubenswrapper[4710]: I1009 09:26:36.382412 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12" exitCode=0 Oct 09 09:26:36 crc kubenswrapper[4710]: I1009 09:26:36.382456 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12"} Oct 09 09:26:36 crc kubenswrapper[4710]: I1009 09:26:36.382815 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179"} Oct 09 09:26:36 crc kubenswrapper[4710]: I1009 09:26:36.382835 4710 scope.go:117] "RemoveContainer" containerID="d29bcc87c0210ac354834c57bbe9818a4507c034cce7af695fa8a4fd8067649a" Oct 09 09:26:46 crc kubenswrapper[4710]: I1009 09:26:46.356406 4710 scope.go:117] "RemoveContainer" containerID="c3e837075ab7aca4ed7966170ec68b220aa7b909c7b9372b457ca94f1d8fcc34" Oct 09 09:26:46 crc kubenswrapper[4710]: I1009 09:26:46.373614 4710 scope.go:117] "RemoveContainer" containerID="70c76449ffa14657d27d832fb38a8026d5e3db5054835540b70e8b5d694ad6e7" Oct 09 09:26:46 crc kubenswrapper[4710]: I1009 09:26:46.391628 4710 scope.go:117] "RemoveContainer" containerID="3fa7718185235f8cbfbd51f1953b57efb9f226c640f87ac4e069445cc1143fe3" Oct 09 09:26:46 crc kubenswrapper[4710]: I1009 09:26:46.419777 4710 scope.go:117] "RemoveContainer" containerID="acc54a4a3f2f93c817365f7d6b06460895f262559e67eb200ec7ffbcc2b25926" Oct 09 09:26:46 crc kubenswrapper[4710]: I1009 09:26:46.435618 4710 scope.go:117] "RemoveContainer" containerID="7e8d759790222815df192950d1161c0b963485de496ebb8f8ff14a7af3befb8e" Oct 09 09:27:41 crc 
kubenswrapper[4710]: I1009 09:27:41.536230 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.538170 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.554419 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.585883 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.585940 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2259k\" (UniqueName: \"kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.585991 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.687274 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.687352 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2259k\" (UniqueName: \"kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.687400 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.687782 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.687901 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.704509 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2259k\" (UniqueName: \"kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k\") pod \"community-operators-smgpf\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:41 crc kubenswrapper[4710]: I1009 09:27:41.852556 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:42 crc kubenswrapper[4710]: I1009 09:27:42.353005 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:42 crc kubenswrapper[4710]: I1009 09:27:42.937855 4710 generic.go:334] "Generic (PLEG): container finished" podID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerID="6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d" exitCode=0 Oct 09 09:27:42 crc kubenswrapper[4710]: I1009 09:27:42.938057 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerDied","Data":"6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d"} Oct 09 09:27:42 crc kubenswrapper[4710]: I1009 09:27:42.938079 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerStarted","Data":"3b16166431b8c5d1bfc5433a690578473fac3e5e0bed63dc77494ad7aee2b461"} Oct 09 09:27:44 crc kubenswrapper[4710]: I1009 09:27:44.951072 4710 generic.go:334] "Generic (PLEG): container finished" podID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerID="5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2" exitCode=0 Oct 09 09:27:44 crc kubenswrapper[4710]: I1009 09:27:44.951167 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerDied","Data":"5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2"} Oct 09 09:27:45 crc kubenswrapper[4710]: I1009 09:27:45.959968 4710 generic.go:334] "Generic (PLEG): container finished" podID="d476761d-c8c2-4c6a-9c2f-31ab12f8a403" containerID="d403c8eeba60a023a48e371857ab63ec49fb33a231d30911942ea4b96b45025b" exitCode=0 Oct 09 09:27:45 crc kubenswrapper[4710]: I1009 09:27:45.960050 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" event={"ID":"d476761d-c8c2-4c6a-9c2f-31ab12f8a403","Type":"ContainerDied","Data":"d403c8eeba60a023a48e371857ab63ec49fb33a231d30911942ea4b96b45025b"} Oct 09 09:27:45 crc kubenswrapper[4710]: I1009 09:27:45.962211 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerStarted","Data":"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422"} Oct 09 09:27:45 crc kubenswrapper[4710]: I1009 09:27:45.989673 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-smgpf" podStartSLOduration=2.503647928 podStartE2EDuration="4.989658232s" podCreationTimestamp="2025-10-09 09:27:41 +0000 UTC" firstStartedPulling="2025-10-09 09:27:42.939310573 +0000 UTC m=+1386.429418970" lastFinishedPulling="2025-10-09 09:27:45.425320888 +0000 UTC m=+1388.915429274" observedRunningTime="2025-10-09 09:27:45.986714721 +0000 UTC m=+1389.476823119" watchObservedRunningTime="2025-10-09 09:27:45.989658232 +0000 UTC m=+1389.479766629" Oct 09 09:27:46 crc kubenswrapper[4710]: I1009 09:27:46.499283 4710 scope.go:117] "RemoveContainer" containerID="d53c1ecaa60a9a8b9724de2b7a0ee97b6cf6e6063bf388a15861c2e018804e4e" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.307023 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.487170 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key\") pod \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.487395 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory\") pod \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.487577 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm5jl\" (UniqueName: \"kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl\") pod \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.487633 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle\") pod \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\" (UID: \"d476761d-c8c2-4c6a-9c2f-31ab12f8a403\") " Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.495251 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl" (OuterVolumeSpecName: "kube-api-access-wm5jl") pod "d476761d-c8c2-4c6a-9c2f-31ab12f8a403" (UID: "d476761d-c8c2-4c6a-9c2f-31ab12f8a403"). InnerVolumeSpecName "kube-api-access-wm5jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.501572 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "d476761d-c8c2-4c6a-9c2f-31ab12f8a403" (UID: "d476761d-c8c2-4c6a-9c2f-31ab12f8a403"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.517717 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d476761d-c8c2-4c6a-9c2f-31ab12f8a403" (UID: "d476761d-c8c2-4c6a-9c2f-31ab12f8a403"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.533368 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory" (OuterVolumeSpecName: "inventory") pod "d476761d-c8c2-4c6a-9c2f-31ab12f8a403" (UID: "d476761d-c8c2-4c6a-9c2f-31ab12f8a403"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.590330 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.590361 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm5jl\" (UniqueName: \"kubernetes.io/projected/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-kube-api-access-wm5jl\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.590377 4710 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.590386 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d476761d-c8c2-4c6a-9c2f-31ab12f8a403-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.979356 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" event={"ID":"d476761d-c8c2-4c6a-9c2f-31ab12f8a403","Type":"ContainerDied","Data":"be1d729339147050a4d1f9b72e5e0e3091f5e23ba0ea6cbc8601534c9b78d595"} Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.979392 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be1d729339147050a4d1f9b72e5e0e3091f5e23ba0ea6cbc8601534c9b78d595" Oct 09 09:27:47 crc kubenswrapper[4710]: I1009 09:27:47.979675 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.040726 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj"] Oct 09 09:27:48 crc kubenswrapper[4710]: E1009 09:27:48.041206 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d476761d-c8c2-4c6a-9c2f-31ab12f8a403" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.041294 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="d476761d-c8c2-4c6a-9c2f-31ab12f8a403" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.041545 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="d476761d-c8c2-4c6a-9c2f-31ab12f8a403" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.042098 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.043865 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.044703 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.045036 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.053199 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj"] Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.056807 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.200050 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lpmg\" (UniqueName: \"kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.201182 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.201368 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 
09:27:48.303922 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lpmg\" (UniqueName: \"kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.304041 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.304150 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.308595 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.310867 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.321158 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lpmg\" (UniqueName: \"kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-th4cj\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.357913 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.795214 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj"] Oct 09 09:27:48 crc kubenswrapper[4710]: I1009 09:27:48.988153 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" event={"ID":"e85c0fdc-926b-4a34-9578-b19fd827b749","Type":"ContainerStarted","Data":"462fdbe3d932c65699d50670668489d2bc994687baf307e37867c3086e95a4fb"} Oct 09 09:27:49 crc kubenswrapper[4710]: I1009 09:27:49.995675 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" event={"ID":"e85c0fdc-926b-4a34-9578-b19fd827b749","Type":"ContainerStarted","Data":"f362f78f15219982d6c9252adfe7e5fa3ff93433b0bd8f193416a426098612b6"} Oct 09 09:27:50 crc kubenswrapper[4710]: I1009 09:27:50.027524 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" podStartSLOduration=1.396232048 podStartE2EDuration="2.027506534s" podCreationTimestamp="2025-10-09 09:27:48 +0000 UTC" firstStartedPulling="2025-10-09 09:27:48.802131424 +0000 UTC m=+1392.292239821" lastFinishedPulling="2025-10-09 09:27:49.43340591 +0000 UTC m=+1392.923514307" observedRunningTime="2025-10-09 09:27:50.026913716 +0000 UTC m=+1393.517022113" watchObservedRunningTime="2025-10-09 09:27:50.027506534 +0000 UTC m=+1393.517614932" Oct 09 09:27:51 crc kubenswrapper[4710]: I1009 09:27:51.853346 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:51 crc kubenswrapper[4710]: I1009 09:27:51.854191 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:51 crc kubenswrapper[4710]: I1009 09:27:51.895333 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:52 crc kubenswrapper[4710]: I1009 09:27:52.043362 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:52 crc kubenswrapper[4710]: I1009 09:27:52.120632 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.022745 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-smgpf" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="registry-server" containerID="cri-o://8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422" gracePeriod=2 Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.369042 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.519819 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content\") pod \"65ac81c2-6839-4a1f-955a-aaac457cc800\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.519901 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2259k\" (UniqueName: \"kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k\") pod \"65ac81c2-6839-4a1f-955a-aaac457cc800\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.520099 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities\") pod \"65ac81c2-6839-4a1f-955a-aaac457cc800\" (UID: \"65ac81c2-6839-4a1f-955a-aaac457cc800\") " Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.520748 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities" (OuterVolumeSpecName: "utilities") pod "65ac81c2-6839-4a1f-955a-aaac457cc800" (UID: "65ac81c2-6839-4a1f-955a-aaac457cc800"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.524846 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k" (OuterVolumeSpecName: "kube-api-access-2259k") pod "65ac81c2-6839-4a1f-955a-aaac457cc800" (UID: "65ac81c2-6839-4a1f-955a-aaac457cc800"). InnerVolumeSpecName "kube-api-access-2259k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.555671 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "65ac81c2-6839-4a1f-955a-aaac457cc800" (UID: "65ac81c2-6839-4a1f-955a-aaac457cc800"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.623141 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.623171 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/65ac81c2-6839-4a1f-955a-aaac457cc800-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:54 crc kubenswrapper[4710]: I1009 09:27:54.623201 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2259k\" (UniqueName: \"kubernetes.io/projected/65ac81c2-6839-4a1f-955a-aaac457cc800-kube-api-access-2259k\") on node \"crc\" DevicePath \"\"" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.034913 4710 generic.go:334] "Generic (PLEG): container finished" podID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerID="8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422" exitCode=0 Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.034972 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smgpf" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.035013 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerDied","Data":"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422"} Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.035395 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smgpf" event={"ID":"65ac81c2-6839-4a1f-955a-aaac457cc800","Type":"ContainerDied","Data":"3b16166431b8c5d1bfc5433a690578473fac3e5e0bed63dc77494ad7aee2b461"} Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.035414 4710 scope.go:117] "RemoveContainer" containerID="8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.061524 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.067569 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-smgpf"] Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.073621 4710 scope.go:117] "RemoveContainer" containerID="5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.098631 4710 scope.go:117] "RemoveContainer" containerID="6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.127213 4710 scope.go:117] "RemoveContainer" containerID="8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422" Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.127527 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422\": container with ID starting with 8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422 not found: ID does not exist" containerID="8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.127553 
4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422"} err="failed to get container status \"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422\": rpc error: code = NotFound desc = could not find container \"8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422\": container with ID starting with 8bddc07856a3a8940ab06cbc42df9ee78e2e0a3ae479f064df9c6c3cde05d422 not found: ID does not exist" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.127571 4710 scope.go:117] "RemoveContainer" containerID="5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2" Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.127879 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2\": container with ID starting with 5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2 not found: ID does not exist" containerID="5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.127916 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2"} err="failed to get container status \"5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2\": rpc error: code = NotFound desc = could not find container \"5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2\": container with ID starting with 5509c97437ee30327a55319872b8f2952f8d032bfe8234c3d423f206a40fd7b2 not found: ID does not exist" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.127930 4710 scope.go:117] "RemoveContainer" containerID="6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d" Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.128248 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d\": container with ID starting with 6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d not found: ID does not exist" containerID="6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.128269 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d"} err="failed to get container status \"6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d\": rpc error: code = NotFound desc = could not find container \"6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d\": container with ID starting with 6b246818651c2cef702304975dbc3e7b5c20bcc9edcff742a08b54dedf7ce68d not found: ID does not exist" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.925117 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.925784 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="registry-server" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.925801 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" 
containerName="registry-server" Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.925824 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="extract-utilities" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.925831 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="extract-utilities" Oct 09 09:27:55 crc kubenswrapper[4710]: E1009 09:27:55.925846 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="extract-content" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.925852 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="extract-content" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.926029 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" containerName="registry-server" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.927117 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.944461 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.947191 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.947274 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:55 crc kubenswrapper[4710]: I1009 09:27:55.947401 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9dml\" (UniqueName: \"kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.049991 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.050645 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.050832 4710 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.051173 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9dml\" (UniqueName: \"kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.051195 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.070926 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9dml\" (UniqueName: \"kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml\") pod \"redhat-marketplace-fbxtr\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.246046 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.639212 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:27:56 crc kubenswrapper[4710]: I1009 09:27:56.822842 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65ac81c2-6839-4a1f-955a-aaac457cc800" path="/var/lib/kubelet/pods/65ac81c2-6839-4a1f-955a-aaac457cc800/volumes" Oct 09 09:27:57 crc kubenswrapper[4710]: I1009 09:27:57.051422 4710 generic.go:334] "Generic (PLEG): container finished" podID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerID="0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389" exitCode=0 Oct 09 09:27:57 crc kubenswrapper[4710]: I1009 09:27:57.051476 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerDied","Data":"0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389"} Oct 09 09:27:57 crc kubenswrapper[4710]: I1009 09:27:57.051499 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerStarted","Data":"7c5417cada1df6977ff35f614c3e972bce0aa00966cad0e955530c3db5e16d34"} Oct 09 09:27:58 crc kubenswrapper[4710]: I1009 09:27:58.061653 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerStarted","Data":"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27"} Oct 09 09:27:59 crc kubenswrapper[4710]: I1009 09:27:59.069065 4710 generic.go:334] "Generic (PLEG): container finished" podID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerID="4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27" exitCode=0 Oct 09 09:27:59 crc 
kubenswrapper[4710]: I1009 09:27:59.069103 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerDied","Data":"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27"} Oct 09 09:28:00 crc kubenswrapper[4710]: I1009 09:28:00.077409 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerStarted","Data":"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9"} Oct 09 09:28:00 crc kubenswrapper[4710]: I1009 09:28:00.093505 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fbxtr" podStartSLOduration=2.429784936 podStartE2EDuration="5.093493095s" podCreationTimestamp="2025-10-09 09:27:55 +0000 UTC" firstStartedPulling="2025-10-09 09:27:57.053181352 +0000 UTC m=+1400.543289748" lastFinishedPulling="2025-10-09 09:27:59.71688951 +0000 UTC m=+1403.206997907" observedRunningTime="2025-10-09 09:28:00.090578879 +0000 UTC m=+1403.580687276" watchObservedRunningTime="2025-10-09 09:28:00.093493095 +0000 UTC m=+1403.583601491" Oct 09 09:28:06 crc kubenswrapper[4710]: I1009 09:28:06.246788 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:06 crc kubenswrapper[4710]: I1009 09:28:06.247161 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:06 crc kubenswrapper[4710]: I1009 09:28:06.279127 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:07 crc kubenswrapper[4710]: I1009 09:28:07.155140 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:07 crc kubenswrapper[4710]: I1009 09:28:07.186098 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.134712 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fbxtr" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="registry-server" containerID="cri-o://34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9" gracePeriod=2 Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.484530 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.557415 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content\") pod \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.557707 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities\") pod \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.557777 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9dml\" (UniqueName: \"kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml\") pod \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\" (UID: \"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07\") " Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.558695 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities" (OuterVolumeSpecName: "utilities") pod "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" (UID: "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.561974 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml" (OuterVolumeSpecName: "kube-api-access-x9dml") pod "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" (UID: "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07"). InnerVolumeSpecName "kube-api-access-x9dml". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.570271 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" (UID: "50a71532-6f74-48c2-bb1a-bfcb0a4f4d07"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.659918 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.659946 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9dml\" (UniqueName: \"kubernetes.io/projected/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-kube-api-access-x9dml\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:09 crc kubenswrapper[4710]: I1009 09:28:09.659956 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.143611 4710 generic.go:334] "Generic (PLEG): container finished" podID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerID="34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9" exitCode=0 Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.143650 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbxtr" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.143652 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerDied","Data":"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9"} Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.143797 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbxtr" event={"ID":"50a71532-6f74-48c2-bb1a-bfcb0a4f4d07","Type":"ContainerDied","Data":"7c5417cada1df6977ff35f614c3e972bce0aa00966cad0e955530c3db5e16d34"} Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.143824 4710 scope.go:117] "RemoveContainer" containerID="34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.165100 4710 scope.go:117] "RemoveContainer" containerID="4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.168624 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.174200 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbxtr"] Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.182059 4710 scope.go:117] "RemoveContainer" containerID="0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.211088 4710 scope.go:117] "RemoveContainer" containerID="34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9" Oct 09 09:28:10 crc kubenswrapper[4710]: E1009 09:28:10.211423 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9\": container with ID starting with 34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9 not found: ID does not exist" containerID="34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.211482 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9"} err="failed to get container status \"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9\": rpc error: code = NotFound desc = could not find container \"34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9\": container with ID starting with 34748c2cf6a174adf6b8934c62d3b302fc4f07be63ac2144e991eb3d5dfe90e9 not found: ID does not exist" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.211506 4710 scope.go:117] "RemoveContainer" containerID="4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27" Oct 09 09:28:10 crc kubenswrapper[4710]: E1009 09:28:10.211758 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27\": container with ID starting with 4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27 not found: ID does not exist" containerID="4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.211780 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27"} err="failed to get container status \"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27\": rpc error: code = NotFound desc = could not find container \"4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27\": container with ID starting with 4ab770a78dc5f768b851ff32cfbc2885f3933721985019e7001809dcb33f6b27 not found: ID does not exist" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.211793 4710 scope.go:117] "RemoveContainer" containerID="0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389" Oct 09 09:28:10 crc kubenswrapper[4710]: E1009 09:28:10.211993 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389\": container with ID starting with 0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389 not found: ID does not exist" containerID="0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.212085 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389"} err="failed to get container status \"0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389\": rpc error: code = NotFound desc = could not find container \"0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389\": container with ID starting with 0137208c4c39d632ebecfbcaf846921c4417bd0cef6d603baf1fe880da73a389 not found: ID does not exist" Oct 09 09:28:10 crc kubenswrapper[4710]: I1009 09:28:10.825899 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" path="/var/lib/kubelet/pods/50a71532-6f74-48c2-bb1a-bfcb0a4f4d07/volumes" Oct 09 09:28:35 crc kubenswrapper[4710]: I1009 09:28:35.546417 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:28:35 crc kubenswrapper[4710]: I1009 09:28:35.546918 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:28:54 crc kubenswrapper[4710]: I1009 09:28:54.473642 4710 generic.go:334] "Generic (PLEG): container finished" podID="e85c0fdc-926b-4a34-9578-b19fd827b749" containerID="f362f78f15219982d6c9252adfe7e5fa3ff93433b0bd8f193416a426098612b6" exitCode=0 Oct 09 09:28:54 crc kubenswrapper[4710]: I1009 09:28:54.473730 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" event={"ID":"e85c0fdc-926b-4a34-9578-b19fd827b749","Type":"ContainerDied","Data":"f362f78f15219982d6c9252adfe7e5fa3ff93433b0bd8f193416a426098612b6"} Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.815880 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.819499 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key\") pod \"e85c0fdc-926b-4a34-9578-b19fd827b749\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.819627 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lpmg\" (UniqueName: \"kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg\") pod \"e85c0fdc-926b-4a34-9578-b19fd827b749\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.819647 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory\") pod \"e85c0fdc-926b-4a34-9578-b19fd827b749\" (UID: \"e85c0fdc-926b-4a34-9578-b19fd827b749\") " Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.826256 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg" (OuterVolumeSpecName: "kube-api-access-6lpmg") pod "e85c0fdc-926b-4a34-9578-b19fd827b749" (UID: "e85c0fdc-926b-4a34-9578-b19fd827b749"). InnerVolumeSpecName "kube-api-access-6lpmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.853219 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e85c0fdc-926b-4a34-9578-b19fd827b749" (UID: "e85c0fdc-926b-4a34-9578-b19fd827b749"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.856259 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory" (OuterVolumeSpecName: "inventory") pod "e85c0fdc-926b-4a34-9578-b19fd827b749" (UID: "e85c0fdc-926b-4a34-9578-b19fd827b749"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.922302 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lpmg\" (UniqueName: \"kubernetes.io/projected/e85c0fdc-926b-4a34-9578-b19fd827b749-kube-api-access-6lpmg\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.922327 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:55.922337 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e85c0fdc-926b-4a34-9578-b19fd827b749-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.491566 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" event={"ID":"e85c0fdc-926b-4a34-9578-b19fd827b749","Type":"ContainerDied","Data":"462fdbe3d932c65699d50670668489d2bc994687baf307e37867c3086e95a4fb"} Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.491649 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="462fdbe3d932c65699d50670668489d2bc994687baf307e37867c3086e95a4fb" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.491605 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.555837 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk"] Oct 09 09:28:56 crc kubenswrapper[4710]: E1009 09:28:56.556145 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e85c0fdc-926b-4a34-9578-b19fd827b749" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556162 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e85c0fdc-926b-4a34-9578-b19fd827b749" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:28:56 crc kubenswrapper[4710]: E1009 09:28:56.556182 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="registry-server" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556188 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="registry-server" Oct 09 09:28:56 crc kubenswrapper[4710]: E1009 09:28:56.556198 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="extract-content" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556203 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="extract-content" Oct 09 09:28:56 crc kubenswrapper[4710]: E1009 09:28:56.556213 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="extract-utilities" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556219 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="extract-utilities" Oct 09 09:28:56 crc 
kubenswrapper[4710]: I1009 09:28:56.556360 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="50a71532-6f74-48c2-bb1a-bfcb0a4f4d07" containerName="registry-server" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556380 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e85c0fdc-926b-4a34-9578-b19fd827b749" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.556887 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.558629 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.558859 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.559068 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.560016 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.566687 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk"] Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.633678 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.633798 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.633846 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bds9j\" (UniqueName: \"kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.735637 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.736019 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.736069 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bds9j\" (UniqueName: \"kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.740813 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.742494 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.755158 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bds9j\" (UniqueName: \"kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:56 crc kubenswrapper[4710]: I1009 09:28:56.869942 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:28:57 crc kubenswrapper[4710]: I1009 09:28:57.344181 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk"] Oct 09 09:28:57 crc kubenswrapper[4710]: I1009 09:28:57.503106 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" event={"ID":"16969c4a-5d88-4cf5-a512-677bf1c2a3ac","Type":"ContainerStarted","Data":"14ddc75bfdaa1f49116fab14d2af7f04d4f700fa9a386219a7ad471baccf2169"} Oct 09 09:28:58 crc kubenswrapper[4710]: I1009 09:28:58.515970 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" event={"ID":"16969c4a-5d88-4cf5-a512-677bf1c2a3ac","Type":"ContainerStarted","Data":"2243d4256ac2afa7936aeeb2ef39b47da418830dac6c09a1a374dbe19ccc06a3"} Oct 09 09:28:58 crc kubenswrapper[4710]: I1009 09:28:58.531654 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" podStartSLOduration=1.858291584 podStartE2EDuration="2.53163784s" podCreationTimestamp="2025-10-09 09:28:56 +0000 UTC" firstStartedPulling="2025-10-09 09:28:57.34985162 +0000 UTC m=+1460.839960018" lastFinishedPulling="2025-10-09 09:28:58.023197867 +0000 UTC m=+1461.513306274" observedRunningTime="2025-10-09 09:28:58.529658518 +0000 UTC m=+1462.019766915" watchObservedRunningTime="2025-10-09 09:28:58.53163784 +0000 UTC m=+1462.021746238" Oct 09 09:29:02 crc kubenswrapper[4710]: I1009 09:29:02.547727 4710 generic.go:334] "Generic (PLEG): container finished" podID="16969c4a-5d88-4cf5-a512-677bf1c2a3ac" containerID="2243d4256ac2afa7936aeeb2ef39b47da418830dac6c09a1a374dbe19ccc06a3" exitCode=0 Oct 09 09:29:02 crc kubenswrapper[4710]: I1009 09:29:02.547787 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" event={"ID":"16969c4a-5d88-4cf5-a512-677bf1c2a3ac","Type":"ContainerDied","Data":"2243d4256ac2afa7936aeeb2ef39b47da418830dac6c09a1a374dbe19ccc06a3"} Oct 09 09:29:03 crc kubenswrapper[4710]: I1009 09:29:03.849004 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:29:03 crc kubenswrapper[4710]: I1009 09:29:03.992406 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory\") pod \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " Oct 09 09:29:03 crc kubenswrapper[4710]: I1009 09:29:03.992468 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key\") pod \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " Oct 09 09:29:03 crc kubenswrapper[4710]: I1009 09:29:03.992572 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bds9j\" (UniqueName: \"kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j\") pod \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\" (UID: \"16969c4a-5d88-4cf5-a512-677bf1c2a3ac\") " Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:03.997693 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j" (OuterVolumeSpecName: "kube-api-access-bds9j") pod "16969c4a-5d88-4cf5-a512-677bf1c2a3ac" (UID: "16969c4a-5d88-4cf5-a512-677bf1c2a3ac"). InnerVolumeSpecName "kube-api-access-bds9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.012881 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16969c4a-5d88-4cf5-a512-677bf1c2a3ac" (UID: "16969c4a-5d88-4cf5-a512-677bf1c2a3ac"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.013562 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory" (OuterVolumeSpecName: "inventory") pod "16969c4a-5d88-4cf5-a512-677bf1c2a3ac" (UID: "16969c4a-5d88-4cf5-a512-677bf1c2a3ac"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.094870 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.094892 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.094901 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bds9j\" (UniqueName: \"kubernetes.io/projected/16969c4a-5d88-4cf5-a512-677bf1c2a3ac-kube-api-access-bds9j\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.562457 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" event={"ID":"16969c4a-5d88-4cf5-a512-677bf1c2a3ac","Type":"ContainerDied","Data":"14ddc75bfdaa1f49116fab14d2af7f04d4f700fa9a386219a7ad471baccf2169"} Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.562491 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14ddc75bfdaa1f49116fab14d2af7f04d4f700fa9a386219a7ad471baccf2169" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.562537 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.619760 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4"] Oct 09 09:29:04 crc kubenswrapper[4710]: E1009 09:29:04.620068 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16969c4a-5d88-4cf5-a512-677bf1c2a3ac" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.620084 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="16969c4a-5d88-4cf5-a512-677bf1c2a3ac" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.620258 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="16969c4a-5d88-4cf5-a512-677bf1c2a3ac" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.620777 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.624885 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.625228 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.625364 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.627150 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.632473 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4"] Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.806060 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.806118 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcmgz\" (UniqueName: \"kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.806236 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.907314 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.907924 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.907989 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcmgz\" (UniqueName: \"kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: 
\"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.912036 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.914907 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.925711 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcmgz\" (UniqueName: \"kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-dftt4\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:04 crc kubenswrapper[4710]: I1009 09:29:04.936494 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:05 crc kubenswrapper[4710]: I1009 09:29:05.389237 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4"] Oct 09 09:29:05 crc kubenswrapper[4710]: I1009 09:29:05.546335 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:29:05 crc kubenswrapper[4710]: I1009 09:29:05.546394 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:29:05 crc kubenswrapper[4710]: I1009 09:29:05.569942 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" event={"ID":"8d7c8160-10f5-4908-a278-d8265862ef51","Type":"ContainerStarted","Data":"53386bb17f8234df492d3b636e6bc5cfdf7ba4a6c079ec445239cee12e91d63d"} Oct 09 09:29:06 crc kubenswrapper[4710]: I1009 09:29:06.578964 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" event={"ID":"8d7c8160-10f5-4908-a278-d8265862ef51","Type":"ContainerStarted","Data":"e3a4b9fa37e87ad8a55189d8ac640d746bbbc976f0762dd5b06569154a94d6e5"} Oct 09 09:29:06 crc kubenswrapper[4710]: I1009 09:29:06.599022 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" podStartSLOduration=2.090528744 podStartE2EDuration="2.599009164s" podCreationTimestamp="2025-10-09 09:29:04 +0000 UTC" 
firstStartedPulling="2025-10-09 09:29:05.392616792 +0000 UTC m=+1468.882725179" lastFinishedPulling="2025-10-09 09:29:05.901097192 +0000 UTC m=+1469.391205599" observedRunningTime="2025-10-09 09:29:06.593136699 +0000 UTC m=+1470.083245096" watchObservedRunningTime="2025-10-09 09:29:06.599009164 +0000 UTC m=+1470.089117561" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.042466 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.044720 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.056893 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.192938 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txlvs\" (UniqueName: \"kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.193019 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.193045 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.294270 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txlvs\" (UniqueName: \"kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.294402 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.294465 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.294812 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities\") pod 
\"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.295023 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.326111 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txlvs\" (UniqueName: \"kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs\") pod \"certified-operators-bj6rp\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.362480 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:27 crc kubenswrapper[4710]: I1009 09:29:27.903349 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:28 crc kubenswrapper[4710]: I1009 09:29:28.727856 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerID="2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155" exitCode=0 Oct 09 09:29:28 crc kubenswrapper[4710]: I1009 09:29:28.727909 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerDied","Data":"2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155"} Oct 09 09:29:28 crc kubenswrapper[4710]: I1009 09:29:28.728202 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerStarted","Data":"863f5c32ac2e221228d7f1d7a965611b12d6c295dface2e8b3b86a760dea3f9a"} Oct 09 09:29:28 crc kubenswrapper[4710]: I1009 09:29:28.729533 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:29:29 crc kubenswrapper[4710]: I1009 09:29:29.742615 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerStarted","Data":"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a"} Oct 09 09:29:30 crc kubenswrapper[4710]: I1009 09:29:30.756588 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerID="5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a" exitCode=0 Oct 09 09:29:30 crc kubenswrapper[4710]: I1009 09:29:30.756772 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerDied","Data":"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a"} Oct 09 09:29:31 crc kubenswrapper[4710]: I1009 09:29:31.768731 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" 
event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerStarted","Data":"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f"} Oct 09 09:29:31 crc kubenswrapper[4710]: I1009 09:29:31.797880 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bj6rp" podStartSLOduration=2.26639917 podStartE2EDuration="4.797849375s" podCreationTimestamp="2025-10-09 09:29:27 +0000 UTC" firstStartedPulling="2025-10-09 09:29:28.729291454 +0000 UTC m=+1492.219399851" lastFinishedPulling="2025-10-09 09:29:31.260741659 +0000 UTC m=+1494.750850056" observedRunningTime="2025-10-09 09:29:31.790645399 +0000 UTC m=+1495.280753796" watchObservedRunningTime="2025-10-09 09:29:31.797849375 +0000 UTC m=+1495.287957772" Oct 09 09:29:34 crc kubenswrapper[4710]: I1009 09:29:34.794383 4710 generic.go:334] "Generic (PLEG): container finished" podID="8d7c8160-10f5-4908-a278-d8265862ef51" containerID="e3a4b9fa37e87ad8a55189d8ac640d746bbbc976f0762dd5b06569154a94d6e5" exitCode=0 Oct 09 09:29:34 crc kubenswrapper[4710]: I1009 09:29:34.794409 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" event={"ID":"8d7c8160-10f5-4908-a278-d8265862ef51","Type":"ContainerDied","Data":"e3a4b9fa37e87ad8a55189d8ac640d746bbbc976f0762dd5b06569154a94d6e5"} Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.546541 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.546646 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.546740 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.548115 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.548205 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" gracePeriod=600 Oct 09 09:29:35 crc kubenswrapper[4710]: E1009 09:29:35.668485 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.803593 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" exitCode=0 Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.803665 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179"} Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.803979 4710 scope.go:117] "RemoveContainer" containerID="45c1c663886c1898255d44177db40405bdd2ff57beb008c51e435da6768e1c12" Oct 09 09:29:35 crc kubenswrapper[4710]: I1009 09:29:35.804631 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:29:35 crc kubenswrapper[4710]: E1009 09:29:35.805027 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.176822 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.283870 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcmgz\" (UniqueName: \"kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz\") pod \"8d7c8160-10f5-4908-a278-d8265862ef51\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.283963 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key\") pod \"8d7c8160-10f5-4908-a278-d8265862ef51\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.284068 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory\") pod \"8d7c8160-10f5-4908-a278-d8265862ef51\" (UID: \"8d7c8160-10f5-4908-a278-d8265862ef51\") " Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.302906 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz" (OuterVolumeSpecName: "kube-api-access-bcmgz") pod "8d7c8160-10f5-4908-a278-d8265862ef51" (UID: "8d7c8160-10f5-4908-a278-d8265862ef51"). InnerVolumeSpecName "kube-api-access-bcmgz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.311247 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory" (OuterVolumeSpecName: "inventory") pod "8d7c8160-10f5-4908-a278-d8265862ef51" (UID: "8d7c8160-10f5-4908-a278-d8265862ef51"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.317150 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8d7c8160-10f5-4908-a278-d8265862ef51" (UID: "8d7c8160-10f5-4908-a278-d8265862ef51"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.387217 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcmgz\" (UniqueName: \"kubernetes.io/projected/8d7c8160-10f5-4908-a278-d8265862ef51-kube-api-access-bcmgz\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.387253 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.387265 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d7c8160-10f5-4908-a278-d8265862ef51-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.814002 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" event={"ID":"8d7c8160-10f5-4908-a278-d8265862ef51","Type":"ContainerDied","Data":"53386bb17f8234df492d3b636e6bc5cfdf7ba4a6c079ec445239cee12e91d63d"} Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.814035 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53386bb17f8234df492d3b636e6bc5cfdf7ba4a6c079ec445239cee12e91d63d" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.814083 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.927600 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls"] Oct 09 09:29:36 crc kubenswrapper[4710]: E1009 09:29:36.927987 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d7c8160-10f5-4908-a278-d8265862ef51" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.928006 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d7c8160-10f5-4908-a278-d8265862ef51" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.928209 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d7c8160-10f5-4908-a278-d8265862ef51" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.928841 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.930371 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.932036 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.932054 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.932949 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:29:36 crc kubenswrapper[4710]: I1009 09:29:36.952785 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls"] Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.104261 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.104542 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg9p6\" (UniqueName: \"kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.104701 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.208108 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg9p6\" (UniqueName: \"kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.208304 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.208502 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" 
(UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.214741 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.223633 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.225799 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg9p6\" (UniqueName: \"kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.248326 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.363129 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.364932 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.417038 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.722129 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls"] Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.833238 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" event={"ID":"0d24c1e7-935b-4892-8208-b85a8f841f73","Type":"ContainerStarted","Data":"11bac8ab8212d3a060f543da1a5d1797ad5fad099828b07a331fc9439da285f6"} Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.868225 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:37 crc kubenswrapper[4710]: I1009 09:29:37.915517 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:38 crc kubenswrapper[4710]: I1009 09:29:38.845601 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" event={"ID":"0d24c1e7-935b-4892-8208-b85a8f841f73","Type":"ContainerStarted","Data":"e8fa7ea86cf30fecd8c06fd174461fa4cbf3c29c9c9829d70175551ccf5efb57"} Oct 09 09:29:38 crc kubenswrapper[4710]: I1009 09:29:38.870177 4710 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" podStartSLOduration=2.352891247 podStartE2EDuration="2.870142256s" podCreationTimestamp="2025-10-09 09:29:36 +0000 UTC" firstStartedPulling="2025-10-09 09:29:37.738711017 +0000 UTC m=+1501.228819414" lastFinishedPulling="2025-10-09 09:29:38.255962026 +0000 UTC m=+1501.746070423" observedRunningTime="2025-10-09 09:29:38.867076274 +0000 UTC m=+1502.357184671" watchObservedRunningTime="2025-10-09 09:29:38.870142256 +0000 UTC m=+1502.360250653" Oct 09 09:29:39 crc kubenswrapper[4710]: I1009 09:29:39.851324 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bj6rp" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="registry-server" containerID="cri-o://d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f" gracePeriod=2 Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.250013 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.382081 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities\") pod \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.382174 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txlvs\" (UniqueName: \"kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs\") pod \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.382264 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content\") pod \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\" (UID: \"e5bb1b46-357b-43a1-be9d-d5a16317fbb9\") " Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.383846 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities" (OuterVolumeSpecName: "utilities") pod "e5bb1b46-357b-43a1-be9d-d5a16317fbb9" (UID: "e5bb1b46-357b-43a1-be9d-d5a16317fbb9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.392630 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs" (OuterVolumeSpecName: "kube-api-access-txlvs") pod "e5bb1b46-357b-43a1-be9d-d5a16317fbb9" (UID: "e5bb1b46-357b-43a1-be9d-d5a16317fbb9"). InnerVolumeSpecName "kube-api-access-txlvs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.418318 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5bb1b46-357b-43a1-be9d-d5a16317fbb9" (UID: "e5bb1b46-357b-43a1-be9d-d5a16317fbb9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.484301 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.484330 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txlvs\" (UniqueName: \"kubernetes.io/projected/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-kube-api-access-txlvs\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.484341 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5bb1b46-357b-43a1-be9d-d5a16317fbb9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.867142 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerID="d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f" exitCode=0 Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.867178 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bj6rp" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.867228 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerDied","Data":"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f"} Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.867562 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bj6rp" event={"ID":"e5bb1b46-357b-43a1-be9d-d5a16317fbb9","Type":"ContainerDied","Data":"863f5c32ac2e221228d7f1d7a965611b12d6c295dface2e8b3b86a760dea3f9a"} Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.867584 4710 scope.go:117] "RemoveContainer" containerID="d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.917482 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.923138 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bj6rp"] Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.928625 4710 scope.go:117] "RemoveContainer" containerID="5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a" Oct 09 09:29:40 crc kubenswrapper[4710]: I1009 09:29:40.976968 4710 scope.go:117] "RemoveContainer" containerID="2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.006293 4710 scope.go:117] "RemoveContainer" containerID="d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f" Oct 09 09:29:41 crc kubenswrapper[4710]: E1009 09:29:41.006776 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f\": container with ID starting with d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f not found: ID does not exist" containerID="d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.006805 
4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f"} err="failed to get container status \"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f\": rpc error: code = NotFound desc = could not find container \"d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f\": container with ID starting with d50a47a24d78d39bea8429362111238b0483bab4b714a4298ec2fe73dde57e3f not found: ID does not exist" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.006827 4710 scope.go:117] "RemoveContainer" containerID="5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a" Oct 09 09:29:41 crc kubenswrapper[4710]: E1009 09:29:41.008901 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a\": container with ID starting with 5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a not found: ID does not exist" containerID="5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.008930 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a"} err="failed to get container status \"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a\": rpc error: code = NotFound desc = could not find container \"5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a\": container with ID starting with 5a563a5f6d249986597e80934aa0f77a28e6c3fc3badfbf666b1e15b3332cb6a not found: ID does not exist" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.008946 4710 scope.go:117] "RemoveContainer" containerID="2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155" Oct 09 09:29:41 crc kubenswrapper[4710]: E1009 09:29:41.009173 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155\": container with ID starting with 2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155 not found: ID does not exist" containerID="2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155" Oct 09 09:29:41 crc kubenswrapper[4710]: I1009 09:29:41.009198 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155"} err="failed to get container status \"2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155\": rpc error: code = NotFound desc = could not find container \"2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155\": container with ID starting with 2175b6226d1755db1b8d2adef2124be79701903bea76a2fe1c07b09cd36eb155 not found: ID does not exist" Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.032799 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-lzhjx"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.042872 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-8wst4"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.053674 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-8wst4"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.063583 4710 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-lzhjx"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.072291 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-khqb9"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.077550 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-khqb9"] Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.822724 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169a7e02-2da0-44b5-916f-2d10ad521e61" path="/var/lib/kubelet/pods/169a7e02-2da0-44b5-916f-2d10ad521e61/volumes" Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.823508 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="997f4298-9244-4ef3-8783-c4e68d569407" path="/var/lib/kubelet/pods/997f4298-9244-4ef3-8783-c4e68d569407/volumes" Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.824018 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a" path="/var/lib/kubelet/pods/aae244ab-69b2-4a70-bf4a-c0f4bbf6ec9a/volumes" Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.824524 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" path="/var/lib/kubelet/pods/e5bb1b46-357b-43a1-be9d-d5a16317fbb9/volumes" Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.884423 4710 generic.go:334] "Generic (PLEG): container finished" podID="0d24c1e7-935b-4892-8208-b85a8f841f73" containerID="e8fa7ea86cf30fecd8c06fd174461fa4cbf3c29c9c9829d70175551ccf5efb57" exitCode=0 Oct 09 09:29:42 crc kubenswrapper[4710]: I1009 09:29:42.884490 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" event={"ID":"0d24c1e7-935b-4892-8208-b85a8f841f73","Type":"ContainerDied","Data":"e8fa7ea86cf30fecd8c06fd174461fa4cbf3c29c9c9829d70175551ccf5efb57"} Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.189616 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.348972 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key\") pod \"0d24c1e7-935b-4892-8208-b85a8f841f73\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.349088 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg9p6\" (UniqueName: \"kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6\") pod \"0d24c1e7-935b-4892-8208-b85a8f841f73\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.349189 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory\") pod \"0d24c1e7-935b-4892-8208-b85a8f841f73\" (UID: \"0d24c1e7-935b-4892-8208-b85a8f841f73\") " Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.353747 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6" (OuterVolumeSpecName: "kube-api-access-xg9p6") pod "0d24c1e7-935b-4892-8208-b85a8f841f73" (UID: "0d24c1e7-935b-4892-8208-b85a8f841f73"). InnerVolumeSpecName "kube-api-access-xg9p6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.369651 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0d24c1e7-935b-4892-8208-b85a8f841f73" (UID: "0d24c1e7-935b-4892-8208-b85a8f841f73"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.370698 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory" (OuterVolumeSpecName: "inventory") pod "0d24c1e7-935b-4892-8208-b85a8f841f73" (UID: "0d24c1e7-935b-4892-8208-b85a8f841f73"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.451347 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg9p6\" (UniqueName: \"kubernetes.io/projected/0d24c1e7-935b-4892-8208-b85a8f841f73-kube-api-access-xg9p6\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.451369 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.451378 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0d24c1e7-935b-4892-8208-b85a8f841f73-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.897826 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" event={"ID":"0d24c1e7-935b-4892-8208-b85a8f841f73","Type":"ContainerDied","Data":"11bac8ab8212d3a060f543da1a5d1797ad5fad099828b07a331fc9439da285f6"} Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.897861 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11bac8ab8212d3a060f543da1a5d1797ad5fad099828b07a331fc9439da285f6" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.897861 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.946969 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn"] Oct 09 09:29:44 crc kubenswrapper[4710]: E1009 09:29:44.947266 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="extract-content" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947282 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="extract-content" Oct 09 09:29:44 crc kubenswrapper[4710]: E1009 09:29:44.947293 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="registry-server" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947299 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="registry-server" Oct 09 09:29:44 crc kubenswrapper[4710]: E1009 09:29:44.947326 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="extract-utilities" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947332 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="extract-utilities" Oct 09 09:29:44 crc kubenswrapper[4710]: E1009 09:29:44.947344 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d24c1e7-935b-4892-8208-b85a8f841f73" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947364 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d24c1e7-935b-4892-8208-b85a8f841f73" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947527 4710 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="e5bb1b46-357b-43a1-be9d-d5a16317fbb9" containerName="registry-server" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.947542 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d24c1e7-935b-4892-8208-b85a8f841f73" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.948058 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.950138 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.950380 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.950415 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.951235 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:29:44 crc kubenswrapper[4710]: I1009 09:29:44.966619 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn"] Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.060345 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dpvg\" (UniqueName: \"kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.060782 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.060991 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.162649 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dpvg\" (UniqueName: \"kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.162767 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.162918 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.166926 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.167002 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.184996 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dpvg\" (UniqueName: \"kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-st4fn\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.260413 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.744407 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn"] Oct 09 09:29:45 crc kubenswrapper[4710]: I1009 09:29:45.905157 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" event={"ID":"80b3492a-68c2-40a4-b164-2a0769a825bf","Type":"ContainerStarted","Data":"02d745b443639195630cbe4088e0bf20fa02a72b99133eea274bcc7ca87bcbe2"} Oct 09 09:29:46 crc kubenswrapper[4710]: I1009 09:29:46.585027 4710 scope.go:117] "RemoveContainer" containerID="fad02ba2af5668af8bfff3f587a1373229a3015a4a366b720f2a90fc0058146c" Oct 09 09:29:46 crc kubenswrapper[4710]: I1009 09:29:46.602937 4710 scope.go:117] "RemoveContainer" containerID="726fc2063daddfb016c335b11fb00bbbe4f98f582ca4b36f4199234e97eaec84" Oct 09 09:29:46 crc kubenswrapper[4710]: I1009 09:29:46.632701 4710 scope.go:117] "RemoveContainer" containerID="e8b6efdad85b29380f15ed8793f18c76d3c1669d0996d208e4a0f71a83ebdee1" Oct 09 09:29:46 crc kubenswrapper[4710]: I1009 09:29:46.912135 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" event={"ID":"80b3492a-68c2-40a4-b164-2a0769a825bf","Type":"ContainerStarted","Data":"a611cb56e1fc4bec7f0ee1327c93ffdba9a2f7628e249194e841d50edc4cc94a"} Oct 09 09:29:46 crc kubenswrapper[4710]: I1009 09:29:46.928994 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" podStartSLOduration=2.429847234 podStartE2EDuration="2.928978483s" podCreationTimestamp="2025-10-09 09:29:44 +0000 UTC" firstStartedPulling="2025-10-09 09:29:45.754000183 +0000 UTC m=+1509.244108580" lastFinishedPulling="2025-10-09 09:29:46.253131432 +0000 UTC m=+1509.743239829" observedRunningTime="2025-10-09 09:29:46.924941601 +0000 UTC m=+1510.415050018" watchObservedRunningTime="2025-10-09 09:29:46.928978483 +0000 UTC m=+1510.419086880" Oct 09 09:29:49 crc kubenswrapper[4710]: I1009 09:29:49.815749 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:29:49 crc kubenswrapper[4710]: E1009 09:29:49.816120 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.289124 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.290729 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.298111 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.451306 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcwgw\" (UniqueName: \"kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.451376 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.451466 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.552729 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcwgw\" (UniqueName: \"kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.552780 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.552833 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.553208 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.553701 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.569198 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qcwgw\" (UniqueName: \"kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw\") pod \"redhat-operators-xk7pp\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:50 crc kubenswrapper[4710]: I1009 09:29:50.611080 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:29:51 crc kubenswrapper[4710]: I1009 09:29:51.057487 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:29:51 crc kubenswrapper[4710]: W1009 09:29:51.061622 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6721ad8d_e494_4c43_aecf_8393ad262cdd.slice/crio-0ac6b6437f6c202a4b299e46d1f5f398f77fe5ab986e4c831d46bbe8486e0107 WatchSource:0}: Error finding container 0ac6b6437f6c202a4b299e46d1f5f398f77fe5ab986e4c831d46bbe8486e0107: Status 404 returned error can't find the container with id 0ac6b6437f6c202a4b299e46d1f5f398f77fe5ab986e4c831d46bbe8486e0107 Oct 09 09:29:51 crc kubenswrapper[4710]: I1009 09:29:51.943166 4710 generic.go:334] "Generic (PLEG): container finished" podID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerID="b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b" exitCode=0 Oct 09 09:29:51 crc kubenswrapper[4710]: I1009 09:29:51.943208 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerDied","Data":"b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b"} Oct 09 09:29:51 crc kubenswrapper[4710]: I1009 09:29:51.943369 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerStarted","Data":"0ac6b6437f6c202a4b299e46d1f5f398f77fe5ab986e4c831d46bbe8486e0107"} Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.024074 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-b50b-account-create-dg79d"] Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.030174 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bf9b-account-create-q69nf"] Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.037154 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-b50b-account-create-dg79d"] Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.042718 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bf9b-account-create-q69nf"] Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.825246 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cfbb538-c79f-4a92-8f7e-0ddffac2b192" path="/var/lib/kubelet/pods/6cfbb538-c79f-4a92-8f7e-0ddffac2b192/volumes" Oct 09 09:29:52 crc kubenswrapper[4710]: I1009 09:29:52.826105 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43c5ce1-db3e-4341-b902-ebc890e268b0" path="/var/lib/kubelet/pods/e43c5ce1-db3e-4341-b902-ebc890e268b0/volumes" Oct 09 09:29:53 crc kubenswrapper[4710]: I1009 09:29:53.021453 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-bb96-account-create-hw97d"] Oct 09 09:29:53 crc kubenswrapper[4710]: I1009 09:29:53.028173 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-bb96-account-create-hw97d"] Oct 09 09:29:53 crc kubenswrapper[4710]: I1009 09:29:53.958072 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerStarted","Data":"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb"} Oct 09 09:29:54 crc kubenswrapper[4710]: I1009 09:29:54.825264 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a27c895-0dd6-4d32-9572-e7a52ff1abbf" path="/var/lib/kubelet/pods/4a27c895-0dd6-4d32-9572-e7a52ff1abbf/volumes" Oct 09 09:29:55 crc kubenswrapper[4710]: I1009 09:29:55.979092 4710 generic.go:334] "Generic (PLEG): container finished" podID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerID="f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb" exitCode=0 Oct 09 09:29:55 crc kubenswrapper[4710]: I1009 09:29:55.979181 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerDied","Data":"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb"} Oct 09 09:29:56 crc kubenswrapper[4710]: I1009 09:29:56.988896 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerStarted","Data":"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097"} Oct 09 09:29:57 crc kubenswrapper[4710]: I1009 09:29:57.004692 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xk7pp" podStartSLOduration=2.246225994 podStartE2EDuration="7.00467227s" podCreationTimestamp="2025-10-09 09:29:50 +0000 UTC" firstStartedPulling="2025-10-09 09:29:51.944493964 +0000 UTC m=+1515.434602361" lastFinishedPulling="2025-10-09 09:29:56.70294024 +0000 UTC m=+1520.193048637" observedRunningTime="2025-10-09 09:29:57.001249486 +0000 UTC m=+1520.491357903" watchObservedRunningTime="2025-10-09 09:29:57.00467227 +0000 UTC m=+1520.494780668" Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.031246 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-nd6hc"] Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.037796 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-9b87z"] Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.044757 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6xvgl"] Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.050237 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-9b87z"] Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.055724 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6xvgl"] Oct 09 09:29:59 crc kubenswrapper[4710]: I1009 09:29:59.060117 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-nd6hc"] Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.152024 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf"] Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.169321 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf"] Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.169450 4710 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.171981 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.173379 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.265945 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.266040 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkbf7\" (UniqueName: \"kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.266109 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.367045 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.367170 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.367194 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkbf7\" (UniqueName: \"kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.368201 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.373832 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.386874 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkbf7\" (UniqueName: \"kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7\") pod \"collect-profiles-29333370-6fvxf\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.485649 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.611683 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.612940 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.659783 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.824495 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bfaedef-c78e-4e3f-88a4-820aa84cb116" path="/var/lib/kubelet/pods/0bfaedef-c78e-4e3f-88a4-820aa84cb116/volumes" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.825552 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fe9051e-5ccf-4754-9dd0-264bc9b32e9b" path="/var/lib/kubelet/pods/3fe9051e-5ccf-4754-9dd0-264bc9b32e9b/volumes" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.826224 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6c6070b-6e0d-45a9-bbd8-5d28cd740a11" path="/var/lib/kubelet/pods/d6c6070b-6e0d-45a9-bbd8-5d28cd740a11/volumes" Oct 09 09:30:00 crc kubenswrapper[4710]: I1009 09:30:00.961350 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf"] Oct 09 09:30:01 crc kubenswrapper[4710]: I1009 09:30:01.021813 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" event={"ID":"a2a59650-de3e-43c2-8628-27df66314464","Type":"ContainerStarted","Data":"6d794cce8e80132fad7e7cc63531233664e6d4a9d511118a0a15ddf18b4ff31a"} Oct 09 09:30:01 crc kubenswrapper[4710]: I1009 09:30:01.067841 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:01 crc kubenswrapper[4710]: I1009 09:30:01.113624 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:30:01 crc kubenswrapper[4710]: I1009 09:30:01.815197 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:30:01 crc kubenswrapper[4710]: 
E1009 09:30:01.815524 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:30:02 crc kubenswrapper[4710]: I1009 09:30:02.033546 4710 generic.go:334] "Generic (PLEG): container finished" podID="a2a59650-de3e-43c2-8628-27df66314464" containerID="2fa1f65ea5d29d0252dad6caf7a1e9a1f9f483a13afd71364994b3ca1c6bac5e" exitCode=0 Oct 09 09:30:02 crc kubenswrapper[4710]: I1009 09:30:02.033634 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" event={"ID":"a2a59650-de3e-43c2-8628-27df66314464","Type":"ContainerDied","Data":"2fa1f65ea5d29d0252dad6caf7a1e9a1f9f483a13afd71364994b3ca1c6bac5e"} Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.043032 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xk7pp" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="registry-server" containerID="cri-o://6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097" gracePeriod=2 Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.335018 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.421299 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.429104 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume\") pod \"a2a59650-de3e-43c2-8628-27df66314464\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.429216 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkbf7\" (UniqueName: \"kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7\") pod \"a2a59650-de3e-43c2-8628-27df66314464\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.429323 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume\") pod \"a2a59650-de3e-43c2-8628-27df66314464\" (UID: \"a2a59650-de3e-43c2-8628-27df66314464\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.430073 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume" (OuterVolumeSpecName: "config-volume") pod "a2a59650-de3e-43c2-8628-27df66314464" (UID: "a2a59650-de3e-43c2-8628-27df66314464"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.430348 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a2a59650-de3e-43c2-8628-27df66314464-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.435152 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7" (OuterVolumeSpecName: "kube-api-access-qkbf7") pod "a2a59650-de3e-43c2-8628-27df66314464" (UID: "a2a59650-de3e-43c2-8628-27df66314464"). InnerVolumeSpecName "kube-api-access-qkbf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.442487 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a2a59650-de3e-43c2-8628-27df66314464" (UID: "a2a59650-de3e-43c2-8628-27df66314464"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.531266 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities\") pod \"6721ad8d-e494-4c43-aecf-8393ad262cdd\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.531526 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content\") pod \"6721ad8d-e494-4c43-aecf-8393ad262cdd\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.531578 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcwgw\" (UniqueName: \"kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw\") pod \"6721ad8d-e494-4c43-aecf-8393ad262cdd\" (UID: \"6721ad8d-e494-4c43-aecf-8393ad262cdd\") " Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.532034 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities" (OuterVolumeSpecName: "utilities") pod "6721ad8d-e494-4c43-aecf-8393ad262cdd" (UID: "6721ad8d-e494-4c43-aecf-8393ad262cdd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.532271 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a2a59650-de3e-43c2-8628-27df66314464-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.532294 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkbf7\" (UniqueName: \"kubernetes.io/projected/a2a59650-de3e-43c2-8628-27df66314464-kube-api-access-qkbf7\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.532303 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.535201 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw" (OuterVolumeSpecName: "kube-api-access-qcwgw") pod "6721ad8d-e494-4c43-aecf-8393ad262cdd" (UID: "6721ad8d-e494-4c43-aecf-8393ad262cdd"). InnerVolumeSpecName "kube-api-access-qcwgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.604121 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6721ad8d-e494-4c43-aecf-8393ad262cdd" (UID: "6721ad8d-e494-4c43-aecf-8393ad262cdd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.636019 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6721ad8d-e494-4c43-aecf-8393ad262cdd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:03 crc kubenswrapper[4710]: I1009 09:30:03.636195 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcwgw\" (UniqueName: \"kubernetes.io/projected/6721ad8d-e494-4c43-aecf-8393ad262cdd-kube-api-access-qcwgw\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.059478 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" event={"ID":"a2a59650-de3e-43c2-8628-27df66314464","Type":"ContainerDied","Data":"6d794cce8e80132fad7e7cc63531233664e6d4a9d511118a0a15ddf18b4ff31a"} Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.059539 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d794cce8e80132fad7e7cc63531233664e6d4a9d511118a0a15ddf18b4ff31a" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.059568 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.063676 4710 generic.go:334] "Generic (PLEG): container finished" podID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerID="6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097" exitCode=0 Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.063707 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerDied","Data":"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097"} Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.063755 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xk7pp" event={"ID":"6721ad8d-e494-4c43-aecf-8393ad262cdd","Type":"ContainerDied","Data":"0ac6b6437f6c202a4b299e46d1f5f398f77fe5ab986e4c831d46bbe8486e0107"} Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.063781 4710 scope.go:117] "RemoveContainer" containerID="6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.063789 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xk7pp" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.103305 4710 scope.go:117] "RemoveContainer" containerID="f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.110821 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.116885 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xk7pp"] Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.128426 4710 scope.go:117] "RemoveContainer" containerID="b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.144027 4710 scope.go:117] "RemoveContainer" containerID="6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097" Oct 09 09:30:04 crc kubenswrapper[4710]: E1009 09:30:04.144383 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097\": container with ID starting with 6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097 not found: ID does not exist" containerID="6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.144479 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097"} err="failed to get container status \"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097\": rpc error: code = NotFound desc = could not find container \"6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097\": container with ID starting with 6746cd083c4673b16583fd063d4af2d243f4e74567b15d6ea10803b295133097 not found: ID does not exist" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.144510 4710 scope.go:117] "RemoveContainer" containerID="f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb" Oct 09 09:30:04 crc kubenswrapper[4710]: E1009 09:30:04.144792 4710 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb\": container with ID starting with f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb not found: ID does not exist" containerID="f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.144815 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb"} err="failed to get container status \"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb\": rpc error: code = NotFound desc = could not find container \"f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb\": container with ID starting with f8369d7f62fac2408f6736326a1c474ef5c9fadc74da20637a39ac3e2d9879fb not found: ID does not exist" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.144829 4710 scope.go:117] "RemoveContainer" containerID="b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b" Oct 09 09:30:04 crc kubenswrapper[4710]: E1009 09:30:04.145156 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b\": container with ID starting with b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b not found: ID does not exist" containerID="b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.145176 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b"} err="failed to get container status \"b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b\": rpc error: code = NotFound desc = could not find container \"b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b\": container with ID starting with b9ad0017a8e9bb4acca5d628669b86775be401d1faf9cae8cee2ffd263dcc31b not found: ID does not exist" Oct 09 09:30:04 crc kubenswrapper[4710]: I1009 09:30:04.826086 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" path="/var/lib/kubelet/pods/6721ad8d-e494-4c43-aecf-8393ad262cdd/volumes" Oct 09 09:30:10 crc kubenswrapper[4710]: I1009 09:30:10.030374 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-m22wk"] Oct 09 09:30:10 crc kubenswrapper[4710]: I1009 09:30:10.036961 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-m22wk"] Oct 09 09:30:10 crc kubenswrapper[4710]: I1009 09:30:10.842713 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="233cf622-10b1-47bb-acf6-12f79fb6ea66" path="/var/lib/kubelet/pods/233cf622-10b1-47bb-acf6-12f79fb6ea66/volumes" Oct 09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.028710 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-b5b1-account-create-8776d"] Oct 09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.034262 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-322a-account-create-hn7b9"] Oct 09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.039419 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-b5b1-account-create-8776d"] Oct 
09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.045080 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0133-account-create-jdvz5"] Oct 09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.051477 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0133-account-create-jdvz5"] Oct 09 09:30:13 crc kubenswrapper[4710]: I1009 09:30:13.056120 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-322a-account-create-hn7b9"] Oct 09 09:30:14 crc kubenswrapper[4710]: I1009 09:30:14.825958 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d3e47ce-b689-4ecd-80f6-23cce6d416f6" path="/var/lib/kubelet/pods/2d3e47ce-b689-4ecd-80f6-23cce6d416f6/volumes" Oct 09 09:30:14 crc kubenswrapper[4710]: I1009 09:30:14.826527 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a894075-d541-491f-8041-f8007027c7b3" path="/var/lib/kubelet/pods/6a894075-d541-491f-8041-f8007027c7b3/volumes" Oct 09 09:30:14 crc kubenswrapper[4710]: I1009 09:30:14.826956 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f8a6e70-3050-4bd5-bf95-23c8c32a8add" path="/var/lib/kubelet/pods/6f8a6e70-3050-4bd5-bf95-23c8c32a8add/volumes" Oct 09 09:30:15 crc kubenswrapper[4710]: I1009 09:30:15.815705 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:30:15 crc kubenswrapper[4710]: E1009 09:30:15.816457 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:30:22 crc kubenswrapper[4710]: I1009 09:30:22.024377 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-5pkvd"] Oct 09 09:30:22 crc kubenswrapper[4710]: I1009 09:30:22.029818 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-5pkvd"] Oct 09 09:30:22 crc kubenswrapper[4710]: I1009 09:30:22.823100 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b43105bc-a07d-402a-bef9-d4946f2827c3" path="/var/lib/kubelet/pods/b43105bc-a07d-402a-bef9-d4946f2827c3/volumes" Oct 09 09:30:26 crc kubenswrapper[4710]: I1009 09:30:26.022541 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jnpq9"] Oct 09 09:30:26 crc kubenswrapper[4710]: I1009 09:30:26.027965 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jnpq9"] Oct 09 09:30:26 crc kubenswrapper[4710]: I1009 09:30:26.823283 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc5ec23c-a515-4aa1-8a89-db7c503d4c9d" path="/var/lib/kubelet/pods/dc5ec23c-a515-4aa1-8a89-db7c503d4c9d/volumes" Oct 09 09:30:30 crc kubenswrapper[4710]: I1009 09:30:30.814997 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:30:30 crc kubenswrapper[4710]: E1009 09:30:30.817116 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:30:32 crc kubenswrapper[4710]: I1009 09:30:32.033071 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-mm2sn"] Oct 09 09:30:32 crc kubenswrapper[4710]: I1009 09:30:32.038126 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-mm2sn"] Oct 09 09:30:32 crc kubenswrapper[4710]: I1009 09:30:32.827412 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80c435f8-7448-4aa8-bc8b-f40bcca2ff1e" path="/var/lib/kubelet/pods/80c435f8-7448-4aa8-bc8b-f40bcca2ff1e/volumes" Oct 09 09:30:34 crc kubenswrapper[4710]: I1009 09:30:34.309712 4710 generic.go:334] "Generic (PLEG): container finished" podID="80b3492a-68c2-40a4-b164-2a0769a825bf" containerID="a611cb56e1fc4bec7f0ee1327c93ffdba9a2f7628e249194e841d50edc4cc94a" exitCode=2 Oct 09 09:30:34 crc kubenswrapper[4710]: I1009 09:30:34.309782 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" event={"ID":"80b3492a-68c2-40a4-b164-2a0769a825bf","Type":"ContainerDied","Data":"a611cb56e1fc4bec7f0ee1327c93ffdba9a2f7628e249194e841d50edc4cc94a"} Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.637542 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.819209 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory\") pod \"80b3492a-68c2-40a4-b164-2a0769a825bf\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.819255 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key\") pod \"80b3492a-68c2-40a4-b164-2a0769a825bf\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.819478 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dpvg\" (UniqueName: \"kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg\") pod \"80b3492a-68c2-40a4-b164-2a0769a825bf\" (UID: \"80b3492a-68c2-40a4-b164-2a0769a825bf\") " Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.827380 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg" (OuterVolumeSpecName: "kube-api-access-8dpvg") pod "80b3492a-68c2-40a4-b164-2a0769a825bf" (UID: "80b3492a-68c2-40a4-b164-2a0769a825bf"). InnerVolumeSpecName "kube-api-access-8dpvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.843834 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory" (OuterVolumeSpecName: "inventory") pod "80b3492a-68c2-40a4-b164-2a0769a825bf" (UID: "80b3492a-68c2-40a4-b164-2a0769a825bf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.846230 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "80b3492a-68c2-40a4-b164-2a0769a825bf" (UID: "80b3492a-68c2-40a4-b164-2a0769a825bf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.921369 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dpvg\" (UniqueName: \"kubernetes.io/projected/80b3492a-68c2-40a4-b164-2a0769a825bf-kube-api-access-8dpvg\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.921555 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:35 crc kubenswrapper[4710]: I1009 09:30:35.921613 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/80b3492a-68c2-40a4-b164-2a0769a825bf-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.024897 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-6fx6n"] Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.030655 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6fx6n"] Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.325559 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" event={"ID":"80b3492a-68c2-40a4-b164-2a0769a825bf","Type":"ContainerDied","Data":"02d745b443639195630cbe4088e0bf20fa02a72b99133eea274bcc7ca87bcbe2"} Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.325595 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02d745b443639195630cbe4088e0bf20fa02a72b99133eea274bcc7ca87bcbe2" Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.325666 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn" Oct 09 09:30:36 crc kubenswrapper[4710]: I1009 09:30:36.825128 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a69a4b9-5080-4c08-bae1-86c06523c8d1" path="/var/lib/kubelet/pods/1a69a4b9-5080-4c08-bae1-86c06523c8d1/volumes" Oct 09 09:30:42 crc kubenswrapper[4710]: I1009 09:30:42.815788 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:30:42 crc kubenswrapper[4710]: E1009 09:30:42.816648 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.028622 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz"] Oct 09 09:30:43 crc kubenswrapper[4710]: E1009 09:30:43.031095 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="registry-server" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031135 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="registry-server" Oct 09 09:30:43 crc kubenswrapper[4710]: E1009 09:30:43.031161 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="extract-utilities" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031185 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="extract-utilities" Oct 09 09:30:43 crc kubenswrapper[4710]: E1009 09:30:43.031222 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="extract-content" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031228 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="extract-content" Oct 09 09:30:43 crc kubenswrapper[4710]: E1009 09:30:43.031242 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a59650-de3e-43c2-8628-27df66314464" containerName="collect-profiles" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031251 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a59650-de3e-43c2-8628-27df66314464" containerName="collect-profiles" Oct 09 09:30:43 crc kubenswrapper[4710]: E1009 09:30:43.031292 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b3492a-68c2-40a4-b164-2a0769a825bf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031302 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b3492a-68c2-40a4-b164-2a0769a825bf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031740 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a59650-de3e-43c2-8628-27df66314464" containerName="collect-profiles" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031772 4710 
memory_manager.go:354] "RemoveStaleState removing state" podUID="80b3492a-68c2-40a4-b164-2a0769a825bf" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.031782 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="6721ad8d-e494-4c43-aecf-8393ad262cdd" containerName="registry-server" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.032904 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.036326 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.036417 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.036595 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.036602 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.053496 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz"] Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.060256 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.060358 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47hk4\" (UniqueName: \"kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.060390 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.162495 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.162706 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.162892 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47hk4\" (UniqueName: \"kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.169138 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.169658 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.178939 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47hk4\" (UniqueName: \"kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.367727 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:30:43 crc kubenswrapper[4710]: I1009 09:30:43.864521 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz"] Oct 09 09:30:44 crc kubenswrapper[4710]: I1009 09:30:44.390620 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" event={"ID":"c5e016e6-4a57-4452-9d54-1e0daf63c8aa","Type":"ContainerStarted","Data":"258cff40bd8a694a15bd0e84d1dff463d4c0d8e6f50f7eb24b8283eaa699274e"} Oct 09 09:30:45 crc kubenswrapper[4710]: I1009 09:30:45.398172 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" event={"ID":"c5e016e6-4a57-4452-9d54-1e0daf63c8aa","Type":"ContainerStarted","Data":"c5c086d34690eb855a9f566aa1a6e368fbf026251d79168eb86ed679427f546d"} Oct 09 09:30:45 crc kubenswrapper[4710]: I1009 09:30:45.424360 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" podStartSLOduration=1.8510873110000001 podStartE2EDuration="2.424337474s" podCreationTimestamp="2025-10-09 09:30:43 +0000 UTC" firstStartedPulling="2025-10-09 09:30:43.868335492 +0000 UTC m=+1567.358443890" lastFinishedPulling="2025-10-09 09:30:44.441585655 +0000 UTC m=+1567.931694053" observedRunningTime="2025-10-09 09:30:45.414938288 +0000 UTC m=+1568.905046686" watchObservedRunningTime="2025-10-09 09:30:45.424337474 +0000 UTC m=+1568.914445861" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.730974 4710 scope.go:117] "RemoveContainer" containerID="e2bd2726ed439093dd05133b380be993a6fed28de75128de3300dfa2a2e4655a" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.755660 4710 scope.go:117] "RemoveContainer" containerID="68beddf809561a9376a5f6aacada0c20303dd041a88fd11bbfc81c76b6acd473" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.802944 4710 scope.go:117] "RemoveContainer" containerID="c012d9cb94b56b59689abe9e4a17f95d8ca3aae9229bacd28068cd2469260128" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.843674 4710 scope.go:117] "RemoveContainer" containerID="d8b9ef2aa408bed2ea317b5c2f23ba2906aa6dab8724ec7470a2fc52adffcdbd" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.858824 4710 scope.go:117] "RemoveContainer" containerID="421a3a76cd9f2b97043f74dc6e354541cc3aaeab3dc648ace7265d8049c09dc4" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.903582 4710 scope.go:117] "RemoveContainer" containerID="0570f356b61f41d1164ac3d6a0a7b018fbdc3febdf59bd4baefc85e0da8ab482" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.921349 4710 scope.go:117] "RemoveContainer" containerID="8814a5fb02920a37c3ebac7d81701836fcbff8deefe7f15f4b8a8ea4aaa3cb50" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.935769 4710 scope.go:117] "RemoveContainer" containerID="ffc5b24229ebec1b23d9f028d75ccc5c3a026f21f005c3127f24227c1323dcde" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.962550 4710 scope.go:117] "RemoveContainer" containerID="fc95db0b2f4f099d80594eceeebddd26924653923570ee0c7d201a7e911e9254" Oct 09 09:30:46 crc kubenswrapper[4710]: I1009 09:30:46.981360 4710 scope.go:117] "RemoveContainer" containerID="71ff411d1e1d3d0ed55a133fb52951d8ec254dbefcec84a98a3a8e0a757f1e1c" Oct 09 09:30:47 crc kubenswrapper[4710]: I1009 09:30:47.019909 4710 scope.go:117] "RemoveContainer" 
containerID="91968fb37e95aa33bba674031de80c9c31cfeaa91100bfca653ade876fb98df7" Oct 09 09:30:47 crc kubenswrapper[4710]: I1009 09:30:47.046847 4710 scope.go:117] "RemoveContainer" containerID="4e0274b649334309e28136c5402649371d08cc0a325fc7305119e64daa2b7400" Oct 09 09:30:47 crc kubenswrapper[4710]: I1009 09:30:47.078262 4710 scope.go:117] "RemoveContainer" containerID="a1bfcf519c7213ae0e4ef45a0e7719a874f24cb3ebace84c75b914f82e5c8c6d" Oct 09 09:30:47 crc kubenswrapper[4710]: I1009 09:30:47.103692 4710 scope.go:117] "RemoveContainer" containerID="6d85b27f2c3296839a5f9deea94f10a1e4a5d676ff3f3d625c43693efad22bee" Oct 09 09:30:50 crc kubenswrapper[4710]: I1009 09:30:50.034543 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-g5glz"] Oct 09 09:30:50 crc kubenswrapper[4710]: I1009 09:30:50.040129 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-g5glz"] Oct 09 09:30:50 crc kubenswrapper[4710]: I1009 09:30:50.826617 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="972b488b-f691-4100-b183-692b2e27f665" path="/var/lib/kubelet/pods/972b488b-f691-4100-b183-692b2e27f665/volumes" Oct 09 09:30:54 crc kubenswrapper[4710]: I1009 09:30:54.815325 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:30:54 crc kubenswrapper[4710]: E1009 09:30:54.816167 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:31:06 crc kubenswrapper[4710]: I1009 09:31:06.821034 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:31:06 crc kubenswrapper[4710]: E1009 09:31:06.822320 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:31:07 crc kubenswrapper[4710]: I1009 09:31:07.040958 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-rmqx2"] Oct 09 09:31:07 crc kubenswrapper[4710]: I1009 09:31:07.046797 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-rmqx2"] Oct 09 09:31:08 crc kubenswrapper[4710]: I1009 09:31:08.823193 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9e9fec6-00af-46a7-9a1f-a59b6b06969c" path="/var/lib/kubelet/pods/c9e9fec6-00af-46a7-9a1f-a59b6b06969c/volumes" Oct 09 09:31:17 crc kubenswrapper[4710]: I1009 09:31:17.023367 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-jkdrz"] Oct 09 09:31:17 crc kubenswrapper[4710]: I1009 09:31:17.030567 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-6dwxx"] Oct 09 09:31:17 crc kubenswrapper[4710]: I1009 09:31:17.035869 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell0-db-create-jkdrz"] Oct 09 09:31:17 crc kubenswrapper[4710]: I1009 09:31:17.040739 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-6dwxx"] Oct 09 09:31:18 crc kubenswrapper[4710]: I1009 09:31:18.691062 4710 generic.go:334] "Generic (PLEG): container finished" podID="c5e016e6-4a57-4452-9d54-1e0daf63c8aa" containerID="c5c086d34690eb855a9f566aa1a6e368fbf026251d79168eb86ed679427f546d" exitCode=0 Oct 09 09:31:18 crc kubenswrapper[4710]: I1009 09:31:18.691109 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" event={"ID":"c5e016e6-4a57-4452-9d54-1e0daf63c8aa","Type":"ContainerDied","Data":"c5c086d34690eb855a9f566aa1a6e368fbf026251d79168eb86ed679427f546d"} Oct 09 09:31:18 crc kubenswrapper[4710]: I1009 09:31:18.824296 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03740f1c-82bd-434e-9544-be5dc802648a" path="/var/lib/kubelet/pods/03740f1c-82bd-434e-9544-be5dc802648a/volumes" Oct 09 09:31:18 crc kubenswrapper[4710]: I1009 09:31:18.825146 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba8ecfdc-58cc-457e-971c-13583f94d4ee" path="/var/lib/kubelet/pods/ba8ecfdc-58cc-457e-971c-13583f94d4ee/volumes" Oct 09 09:31:19 crc kubenswrapper[4710]: I1009 09:31:19.815175 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:31:19 crc kubenswrapper[4710]: E1009 09:31:19.815715 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.014738 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.208148 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key\") pod \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.208537 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory\") pod \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.208848 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47hk4\" (UniqueName: \"kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4\") pod \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\" (UID: \"c5e016e6-4a57-4452-9d54-1e0daf63c8aa\") " Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.214600 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4" (OuterVolumeSpecName: "kube-api-access-47hk4") pod "c5e016e6-4a57-4452-9d54-1e0daf63c8aa" (UID: "c5e016e6-4a57-4452-9d54-1e0daf63c8aa"). InnerVolumeSpecName "kube-api-access-47hk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.230376 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c5e016e6-4a57-4452-9d54-1e0daf63c8aa" (UID: "c5e016e6-4a57-4452-9d54-1e0daf63c8aa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.232657 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory" (OuterVolumeSpecName: "inventory") pod "c5e016e6-4a57-4452-9d54-1e0daf63c8aa" (UID: "c5e016e6-4a57-4452-9d54-1e0daf63c8aa"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.310970 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47hk4\" (UniqueName: \"kubernetes.io/projected/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-kube-api-access-47hk4\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.311210 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.311245 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c5e016e6-4a57-4452-9d54-1e0daf63c8aa-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.706028 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" event={"ID":"c5e016e6-4a57-4452-9d54-1e0daf63c8aa","Type":"ContainerDied","Data":"258cff40bd8a694a15bd0e84d1dff463d4c0d8e6f50f7eb24b8283eaa699274e"} Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.706063 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="258cff40bd8a694a15bd0e84d1dff463d4c0d8e6f50f7eb24b8283eaa699274e" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.706071 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.783164 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8fkpk"] Oct 09 09:31:20 crc kubenswrapper[4710]: E1009 09:31:20.783503 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5e016e6-4a57-4452-9d54-1e0daf63c8aa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.783520 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5e016e6-4a57-4452-9d54-1e0daf63c8aa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.783702 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5e016e6-4a57-4452-9d54-1e0daf63c8aa" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.784251 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.786206 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.786740 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.787259 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.787414 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.795576 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8fkpk"] Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.920000 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.921100 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnx6g\" (UniqueName: \"kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:20 crc kubenswrapper[4710]: I1009 09:31:20.921402 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.022993 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.023094 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.023130 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnx6g\" (UniqueName: \"kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc 
kubenswrapper[4710]: I1009 09:31:21.026915 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.029471 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.037568 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnx6g\" (UniqueName: \"kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g\") pod \"ssh-known-hosts-edpm-deployment-8fkpk\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.101047 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.546760 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8fkpk"] Oct 09 09:31:21 crc kubenswrapper[4710]: I1009 09:31:21.714299 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" event={"ID":"705fa1ab-958a-4fa8-a290-f43c0e1260d6","Type":"ContainerStarted","Data":"be9245063dd5ff0bfb85e9bd5c98232821fb01e8e81091b60337acfb9a0bbd32"} Oct 09 09:31:22 crc kubenswrapper[4710]: I1009 09:31:22.721956 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" event={"ID":"705fa1ab-958a-4fa8-a290-f43c0e1260d6","Type":"ContainerStarted","Data":"24d659da17c694c662eb7dcbaaba6824f80f10cded4476e8810864ce79e68c79"} Oct 09 09:31:22 crc kubenswrapper[4710]: I1009 09:31:22.737747 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" podStartSLOduration=2.184478171 podStartE2EDuration="2.737730982s" podCreationTimestamp="2025-10-09 09:31:20 +0000 UTC" firstStartedPulling="2025-10-09 09:31:21.552988137 +0000 UTC m=+1605.043096535" lastFinishedPulling="2025-10-09 09:31:22.106240949 +0000 UTC m=+1605.596349346" observedRunningTime="2025-10-09 09:31:22.732946729 +0000 UTC m=+1606.223055126" watchObservedRunningTime="2025-10-09 09:31:22.737730982 +0000 UTC m=+1606.227839379" Oct 09 09:31:24 crc kubenswrapper[4710]: I1009 09:31:24.020909 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-skpfw"] Oct 09 09:31:24 crc kubenswrapper[4710]: I1009 09:31:24.028359 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-skpfw"] Oct 09 09:31:24 crc kubenswrapper[4710]: I1009 09:31:24.824199 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="311d9086-0967-491c-91c7-0c783685fb7e" path="/var/lib/kubelet/pods/311d9086-0967-491c-91c7-0c783685fb7e/volumes" Oct 09 09:31:27 crc kubenswrapper[4710]: I1009 09:31:27.759057 4710 generic.go:334] "Generic (PLEG): container finished" 
podID="705fa1ab-958a-4fa8-a290-f43c0e1260d6" containerID="24d659da17c694c662eb7dcbaaba6824f80f10cded4476e8810864ce79e68c79" exitCode=0 Oct 09 09:31:27 crc kubenswrapper[4710]: I1009 09:31:27.759132 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" event={"ID":"705fa1ab-958a-4fa8-a290-f43c0e1260d6","Type":"ContainerDied","Data":"24d659da17c694c662eb7dcbaaba6824f80f10cded4476e8810864ce79e68c79"} Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.032778 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-01f6-account-create-gk6mj"] Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.036583 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e773-account-create-99d7l"] Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.041719 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-01f6-account-create-gk6mj"] Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.045975 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e773-account-create-99d7l"] Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.053739 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.253729 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0\") pod \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.253793 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam\") pod \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.253982 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnx6g\" (UniqueName: \"kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g\") pod \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\" (UID: \"705fa1ab-958a-4fa8-a290-f43c0e1260d6\") " Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.259134 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g" (OuterVolumeSpecName: "kube-api-access-hnx6g") pod "705fa1ab-958a-4fa8-a290-f43c0e1260d6" (UID: "705fa1ab-958a-4fa8-a290-f43c0e1260d6"). InnerVolumeSpecName "kube-api-access-hnx6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.274696 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "705fa1ab-958a-4fa8-a290-f43c0e1260d6" (UID: "705fa1ab-958a-4fa8-a290-f43c0e1260d6"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.275731 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "705fa1ab-958a-4fa8-a290-f43c0e1260d6" (UID: "705fa1ab-958a-4fa8-a290-f43c0e1260d6"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.357132 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnx6g\" (UniqueName: \"kubernetes.io/projected/705fa1ab-958a-4fa8-a290-f43c0e1260d6-kube-api-access-hnx6g\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.357171 4710 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.357185 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/705fa1ab-958a-4fa8-a290-f43c0e1260d6-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.778706 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" event={"ID":"705fa1ab-958a-4fa8-a290-f43c0e1260d6","Type":"ContainerDied","Data":"be9245063dd5ff0bfb85e9bd5c98232821fb01e8e81091b60337acfb9a0bbd32"} Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.778747 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be9245063dd5ff0bfb85e9bd5c98232821fb01e8e81091b60337acfb9a0bbd32" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.778807 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-8fkpk" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.831719 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z"] Oct 09 09:31:29 crc kubenswrapper[4710]: E1009 09:31:29.832044 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="705fa1ab-958a-4fa8-a290-f43c0e1260d6" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.832061 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="705fa1ab-958a-4fa8-a290-f43c0e1260d6" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.832247 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="705fa1ab-958a-4fa8-a290-f43c0e1260d6" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.832890 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.834847 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.835137 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.835289 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.835420 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.841369 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z"] Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.969268 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.969879 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j89hn\" (UniqueName: \"kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:29 crc kubenswrapper[4710]: I1009 09:31:29.969987 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.073091 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.073233 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j89hn\" (UniqueName: \"kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.073260 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.078587 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.079052 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.089046 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j89hn\" (UniqueName: \"kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ppm5z\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.148512 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.588109 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z"] Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.788298 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" event={"ID":"755e66af-574b-4004-91fe-5872354cc0d8","Type":"ContainerStarted","Data":"e0793392dd906a88b00d94da5c46976f1acbf359ac34fbdafdeced38eba588f3"} Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.824300 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23e08cbb-62cb-4534-811f-dfce27d4c222" path="/var/lib/kubelet/pods/23e08cbb-62cb-4534-811f-dfce27d4c222/volumes" Oct 09 09:31:30 crc kubenswrapper[4710]: I1009 09:31:30.824810 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="283d4932-4284-4a3e-af2e-197c82a84fb9" path="/var/lib/kubelet/pods/283d4932-4284-4a3e-af2e-197c82a84fb9/volumes" Oct 09 09:31:31 crc kubenswrapper[4710]: I1009 09:31:31.795648 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" event={"ID":"755e66af-574b-4004-91fe-5872354cc0d8","Type":"ContainerStarted","Data":"8b707def3b08b50f51b57f8ffa5e265df9110b30948cdd0c58f88c2283d4c46c"} Oct 09 09:31:31 crc kubenswrapper[4710]: I1009 09:31:31.819503 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" podStartSLOduration=2.174260699 podStartE2EDuration="2.819488098s" podCreationTimestamp="2025-10-09 09:31:29 +0000 UTC" firstStartedPulling="2025-10-09 09:31:30.592168362 +0000 UTC m=+1614.082276759" lastFinishedPulling="2025-10-09 09:31:31.237395761 +0000 UTC m=+1614.727504158" observedRunningTime="2025-10-09 09:31:31.819148127 +0000 UTC m=+1615.309256524" watchObservedRunningTime="2025-10-09 09:31:31.819488098 +0000 UTC m=+1615.309596496" Oct 09 09:31:32 crc 
kubenswrapper[4710]: I1009 09:31:32.816791 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:31:32 crc kubenswrapper[4710]: E1009 09:31:32.817206 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:31:37 crc kubenswrapper[4710]: I1009 09:31:37.864231 4710 generic.go:334] "Generic (PLEG): container finished" podID="755e66af-574b-4004-91fe-5872354cc0d8" containerID="8b707def3b08b50f51b57f8ffa5e265df9110b30948cdd0c58f88c2283d4c46c" exitCode=0 Oct 09 09:31:37 crc kubenswrapper[4710]: I1009 09:31:37.864314 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" event={"ID":"755e66af-574b-4004-91fe-5872354cc0d8","Type":"ContainerDied","Data":"8b707def3b08b50f51b57f8ffa5e265df9110b30948cdd0c58f88c2283d4c46c"} Oct 09 09:31:38 crc kubenswrapper[4710]: I1009 09:31:38.023908 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-4b56-account-create-658sj"] Oct 09 09:31:38 crc kubenswrapper[4710]: I1009 09:31:38.029848 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-4b56-account-create-658sj"] Oct 09 09:31:38 crc kubenswrapper[4710]: I1009 09:31:38.823731 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b77a877-c31e-4ede-831f-2dc85c37d957" path="/var/lib/kubelet/pods/9b77a877-c31e-4ede-831f-2dc85c37d957/volumes" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.175759 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.245793 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key\") pod \"755e66af-574b-4004-91fe-5872354cc0d8\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.245891 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory\") pod \"755e66af-574b-4004-91fe-5872354cc0d8\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.245933 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j89hn\" (UniqueName: \"kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn\") pod \"755e66af-574b-4004-91fe-5872354cc0d8\" (UID: \"755e66af-574b-4004-91fe-5872354cc0d8\") " Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.251611 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn" (OuterVolumeSpecName: "kube-api-access-j89hn") pod "755e66af-574b-4004-91fe-5872354cc0d8" (UID: "755e66af-574b-4004-91fe-5872354cc0d8"). InnerVolumeSpecName "kube-api-access-j89hn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.267551 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "755e66af-574b-4004-91fe-5872354cc0d8" (UID: "755e66af-574b-4004-91fe-5872354cc0d8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.272069 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory" (OuterVolumeSpecName: "inventory") pod "755e66af-574b-4004-91fe-5872354cc0d8" (UID: "755e66af-574b-4004-91fe-5872354cc0d8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.348508 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.348555 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/755e66af-574b-4004-91fe-5872354cc0d8-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.348567 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j89hn\" (UniqueName: \"kubernetes.io/projected/755e66af-574b-4004-91fe-5872354cc0d8-kube-api-access-j89hn\") on node \"crc\" DevicePath \"\"" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.880986 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" event={"ID":"755e66af-574b-4004-91fe-5872354cc0d8","Type":"ContainerDied","Data":"e0793392dd906a88b00d94da5c46976f1acbf359ac34fbdafdeced38eba588f3"} Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.881023 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0793392dd906a88b00d94da5c46976f1acbf359ac34fbdafdeced38eba588f3" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.881036 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.933584 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk"] Oct 09 09:31:39 crc kubenswrapper[4710]: E1009 09:31:39.933952 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="755e66af-574b-4004-91fe-5872354cc0d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.933985 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="755e66af-574b-4004-91fe-5872354cc0d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.934178 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="755e66af-574b-4004-91fe-5872354cc0d8" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.934767 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.957274 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk"] Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.960055 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.960341 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.960499 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.960674 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.962983 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.963031 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vszz8\" (UniqueName: \"kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:39 crc kubenswrapper[4710]: I1009 09:31:39.963214 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.064079 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.064118 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vszz8\" (UniqueName: \"kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.064316 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: 
\"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.069457 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.070509 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.077766 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vszz8\" (UniqueName: \"kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.299405 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.752109 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk"] Oct 09 09:31:40 crc kubenswrapper[4710]: I1009 09:31:40.894607 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" event={"ID":"596a1e0e-a90f-4d76-b0c7-746d444e4cf2","Type":"ContainerStarted","Data":"4e846ea7276d66d569d9a0399aaa96e376b83faa50c97f9310ea9ee0f64f7d83"} Oct 09 09:31:41 crc kubenswrapper[4710]: I1009 09:31:41.904302 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" event={"ID":"596a1e0e-a90f-4d76-b0c7-746d444e4cf2","Type":"ContainerStarted","Data":"d58f396e91bf15782d1716f261a9a07e90bc7d346f46e005f26fc94c28382312"} Oct 09 09:31:44 crc kubenswrapper[4710]: I1009 09:31:44.814835 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:31:44 crc kubenswrapper[4710]: E1009 09:31:44.815694 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.316693 4710 scope.go:117] "RemoveContainer" containerID="20b7d5d1ea2e8d027f6ac1a980a3024e1871dfcb2d77a3f4d99ff6abf6414b1c" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.358090 4710 scope.go:117] "RemoveContainer" containerID="9b2664b53678277fb17e0f0e1fff6d1cecbd394d9d162f68ba0eeb7c6487b88c" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.372203 4710 
scope.go:117] "RemoveContainer" containerID="7503d9c57c6aa5494069872083154dca9a14d3285548af2e64fd831ff833c049" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.413331 4710 scope.go:117] "RemoveContainer" containerID="21294f6fa11199db134d7b7759e234647241c6c5b71de84a903f6e7cde0dadf7" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.438287 4710 scope.go:117] "RemoveContainer" containerID="78135de6aabe93a9be1cc7ccb736691fe78ed289d1c61ae5fd216ed656211d52" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.463406 4710 scope.go:117] "RemoveContainer" containerID="58175b2ee152dc9210861748435624e7594fb216a19cdb4d98ba91e3be1c649d" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.504756 4710 scope.go:117] "RemoveContainer" containerID="e732afc665a465ed802c0efb27558808cb2189bd7dceb567a8c43a4cf281a248" Oct 09 09:31:47 crc kubenswrapper[4710]: I1009 09:31:47.523707 4710 scope.go:117] "RemoveContainer" containerID="4c1c2a09b8b4455cee8c59949cdc410f77b86eb9216a93b95aa5d9cde740deb4" Oct 09 09:31:52 crc kubenswrapper[4710]: I1009 09:31:52.022214 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" podStartSLOduration=12.494633232 podStartE2EDuration="13.022197698s" podCreationTimestamp="2025-10-09 09:31:39 +0000 UTC" firstStartedPulling="2025-10-09 09:31:40.757608948 +0000 UTC m=+1624.247717346" lastFinishedPulling="2025-10-09 09:31:41.285173416 +0000 UTC m=+1624.775281812" observedRunningTime="2025-10-09 09:31:41.920731916 +0000 UTC m=+1625.410840313" watchObservedRunningTime="2025-10-09 09:31:52.022197698 +0000 UTC m=+1635.512306096" Oct 09 09:31:52 crc kubenswrapper[4710]: I1009 09:31:52.023930 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9l65"] Oct 09 09:31:52 crc kubenswrapper[4710]: I1009 09:31:52.029642 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-r9l65"] Oct 09 09:31:52 crc kubenswrapper[4710]: I1009 09:31:52.823324 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb71d13-b228-499f-b4ec-0fc78dfb5663" path="/var/lib/kubelet/pods/acb71d13-b228-499f-b4ec-0fc78dfb5663/volumes" Oct 09 09:31:58 crc kubenswrapper[4710]: I1009 09:31:58.814636 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:31:58 crc kubenswrapper[4710]: E1009 09:31:58.815347 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:32:09 crc kubenswrapper[4710]: I1009 09:32:09.815021 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:32:09 crc kubenswrapper[4710]: E1009 09:32:09.815615 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" 
podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:32:11 crc kubenswrapper[4710]: I1009 09:32:11.027677 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-74mpr"] Oct 09 09:32:11 crc kubenswrapper[4710]: I1009 09:32:11.035478 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-74mpr"] Oct 09 09:32:12 crc kubenswrapper[4710]: I1009 09:32:12.825499 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b890adc9-4438-4ec2-b8a9-ef4c010630a2" path="/var/lib/kubelet/pods/b890adc9-4438-4ec2-b8a9-ef4c010630a2/volumes" Oct 09 09:32:14 crc kubenswrapper[4710]: I1009 09:32:14.021760 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-nvq24"] Oct 09 09:32:14 crc kubenswrapper[4710]: I1009 09:32:14.029832 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-nvq24"] Oct 09 09:32:14 crc kubenswrapper[4710]: I1009 09:32:14.824314 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e80dad3d-99a6-42a9-8d55-c54f02dee2bd" path="/var/lib/kubelet/pods/e80dad3d-99a6-42a9-8d55-c54f02dee2bd/volumes" Oct 09 09:32:22 crc kubenswrapper[4710]: I1009 09:32:22.814376 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:32:22 crc kubenswrapper[4710]: E1009 09:32:22.814939 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:32:36 crc kubenswrapper[4710]: I1009 09:32:36.821735 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:32:36 crc kubenswrapper[4710]: E1009 09:32:36.822903 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:32:47 crc kubenswrapper[4710]: I1009 09:32:47.683794 4710 scope.go:117] "RemoveContainer" containerID="8034ea960debf427021913fb5dc3aeae0f584145bb5af61c5407a2d175fb0abd" Oct 09 09:32:47 crc kubenswrapper[4710]: I1009 09:32:47.715788 4710 scope.go:117] "RemoveContainer" containerID="e91dfa268a8aeb2b803efa369c2df8dba8063108309f8fde5bf2f9acaf5662aa" Oct 09 09:32:47 crc kubenswrapper[4710]: I1009 09:32:47.751700 4710 scope.go:117] "RemoveContainer" containerID="4e8cfac6d19526cb2dcc1273b769b08eb964fea262a3ce0f2d15f2026d9c0efd" Oct 09 09:32:48 crc kubenswrapper[4710]: I1009 09:32:48.815113 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:32:48 crc kubenswrapper[4710]: E1009 09:32:48.815565 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:32:52 crc kubenswrapper[4710]: I1009 09:32:52.371318 4710 generic.go:334] "Generic (PLEG): container finished" podID="596a1e0e-a90f-4d76-b0c7-746d444e4cf2" containerID="d58f396e91bf15782d1716f261a9a07e90bc7d346f46e005f26fc94c28382312" exitCode=0 Oct 09 09:32:52 crc kubenswrapper[4710]: I1009 09:32:52.371382 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" event={"ID":"596a1e0e-a90f-4d76-b0c7-746d444e4cf2","Type":"ContainerDied","Data":"d58f396e91bf15782d1716f261a9a07e90bc7d346f46e005f26fc94c28382312"} Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.683827 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.711469 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key\") pod \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.711593 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory\") pod \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.711658 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vszz8\" (UniqueName: \"kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8\") pod \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\" (UID: \"596a1e0e-a90f-4d76-b0c7-746d444e4cf2\") " Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.720794 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8" (OuterVolumeSpecName: "kube-api-access-vszz8") pod "596a1e0e-a90f-4d76-b0c7-746d444e4cf2" (UID: "596a1e0e-a90f-4d76-b0c7-746d444e4cf2"). InnerVolumeSpecName "kube-api-access-vszz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.732338 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory" (OuterVolumeSpecName: "inventory") pod "596a1e0e-a90f-4d76-b0c7-746d444e4cf2" (UID: "596a1e0e-a90f-4d76-b0c7-746d444e4cf2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.734349 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "596a1e0e-a90f-4d76-b0c7-746d444e4cf2" (UID: "596a1e0e-a90f-4d76-b0c7-746d444e4cf2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.813833 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.813866 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:32:53 crc kubenswrapper[4710]: I1009 09:32:53.813875 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vszz8\" (UniqueName: \"kubernetes.io/projected/596a1e0e-a90f-4d76-b0c7-746d444e4cf2-kube-api-access-vszz8\") on node \"crc\" DevicePath \"\"" Oct 09 09:32:54 crc kubenswrapper[4710]: I1009 09:32:54.385192 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" event={"ID":"596a1e0e-a90f-4d76-b0c7-746d444e4cf2","Type":"ContainerDied","Data":"4e846ea7276d66d569d9a0399aaa96e376b83faa50c97f9310ea9ee0f64f7d83"} Oct 09 09:32:54 crc kubenswrapper[4710]: I1009 09:32:54.385227 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e846ea7276d66d569d9a0399aaa96e376b83faa50c97f9310ea9ee0f64f7d83" Oct 09 09:32:54 crc kubenswrapper[4710]: I1009 09:32:54.385289 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk" Oct 09 09:32:59 crc kubenswrapper[4710]: I1009 09:32:59.041012 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-tlfsf"] Oct 09 09:32:59 crc kubenswrapper[4710]: I1009 09:32:59.046293 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-tlfsf"] Oct 09 09:33:00 crc kubenswrapper[4710]: I1009 09:33:00.823069 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99754569-4c4f-4219-8aad-d9491e9117b5" path="/var/lib/kubelet/pods/99754569-4c4f-4219-8aad-d9491e9117b5/volumes" Oct 09 09:33:02 crc kubenswrapper[4710]: I1009 09:33:02.815812 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:33:02 crc kubenswrapper[4710]: E1009 09:33:02.816220 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:33:16 crc kubenswrapper[4710]: I1009 09:33:16.820166 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:33:16 crc kubenswrapper[4710]: E1009 09:33:16.820816 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:33:31 crc 
kubenswrapper[4710]: I1009 09:33:31.816113 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:33:31 crc kubenswrapper[4710]: E1009 09:33:31.817085 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:33:46 crc kubenswrapper[4710]: I1009 09:33:46.820856 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:33:46 crc kubenswrapper[4710]: E1009 09:33:46.821651 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:33:47 crc kubenswrapper[4710]: I1009 09:33:47.820250 4710 scope.go:117] "RemoveContainer" containerID="82cc46c655a1d76bded7a5c79b4b92b554d6c98650edb09aa90c984b3b38d25a" Oct 09 09:34:01 crc kubenswrapper[4710]: I1009 09:34:01.815355 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:34:01 crc kubenswrapper[4710]: E1009 09:34:01.816172 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:34:15 crc kubenswrapper[4710]: I1009 09:34:15.815638 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:34:15 crc kubenswrapper[4710]: E1009 09:34:15.816498 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:34:28 crc kubenswrapper[4710]: I1009 09:34:28.815741 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:34:28 crc kubenswrapper[4710]: E1009 09:34:28.816713 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:34:42 crc 
kubenswrapper[4710]: I1009 09:34:42.815087 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:34:43 crc kubenswrapper[4710]: I1009 09:34:43.159386 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b"} Oct 09 09:36:26 crc kubenswrapper[4710]: E1009 09:36:26.524400 4710 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.26.166:58302->192.168.26.166:38571: write tcp 192.168.26.166:58302->192.168.26.166:38571: write: broken pipe Oct 09 09:37:05 crc kubenswrapper[4710]: I1009 09:37:05.545905 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:37:05 crc kubenswrapper[4710]: I1009 09:37:05.546498 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.396955 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8fkpk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.420524 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-8fkpk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.429563 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.434751 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.441881 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.447282 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.451823 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.456941 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.461278 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.465278 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-st4fn"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.469604 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-xvmxz"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.473729 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ppm5z"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.479450 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.484652 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-pqhsl"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.488879 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-x7rnk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.492794 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.496867 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-th4cj"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.500738 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-fmjxk"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.504791 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-jkgls"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.509022 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-dftt4"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.513311 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb"] Oct 09 09:37:11 crc kubenswrapper[4710]: I1009 09:37:11.517564 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-d2xpb"] Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.826101 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d24c1e7-935b-4892-8208-b85a8f841f73" path="/var/lib/kubelet/pods/0d24c1e7-935b-4892-8208-b85a8f841f73/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.826929 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16969c4a-5d88-4cf5-a512-677bf1c2a3ac" path="/var/lib/kubelet/pods/16969c4a-5d88-4cf5-a512-677bf1c2a3ac/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.827634 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fbed35b-0bc2-4811-bb62-64d295aad22f" path="/var/lib/kubelet/pods/1fbed35b-0bc2-4811-bb62-64d295aad22f/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.828240 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596a1e0e-a90f-4d76-b0c7-746d444e4cf2" path="/var/lib/kubelet/pods/596a1e0e-a90f-4d76-b0c7-746d444e4cf2/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.829449 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="705fa1ab-958a-4fa8-a290-f43c0e1260d6" path="/var/lib/kubelet/pods/705fa1ab-958a-4fa8-a290-f43c0e1260d6/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.830021 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="755e66af-574b-4004-91fe-5872354cc0d8" path="/var/lib/kubelet/pods/755e66af-574b-4004-91fe-5872354cc0d8/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.830565 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b3492a-68c2-40a4-b164-2a0769a825bf" path="/var/lib/kubelet/pods/80b3492a-68c2-40a4-b164-2a0769a825bf/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.831744 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d7c8160-10f5-4908-a278-d8265862ef51" path="/var/lib/kubelet/pods/8d7c8160-10f5-4908-a278-d8265862ef51/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.832276 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5e016e6-4a57-4452-9d54-1e0daf63c8aa" path="/var/lib/kubelet/pods/c5e016e6-4a57-4452-9d54-1e0daf63c8aa/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.832917 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d476761d-c8c2-4c6a-9c2f-31ab12f8a403" path="/var/lib/kubelet/pods/d476761d-c8c2-4c6a-9c2f-31ab12f8a403/volumes" Oct 09 09:37:12 crc kubenswrapper[4710]: I1009 09:37:12.834232 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e85c0fdc-926b-4a34-9578-b19fd827b749" path="/var/lib/kubelet/pods/e85c0fdc-926b-4a34-9578-b19fd827b749/volumes" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.544015 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv"] Oct 09 09:37:23 crc kubenswrapper[4710]: E1009 09:37:23.544852 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596a1e0e-a90f-4d76-b0c7-746d444e4cf2" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.544867 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="596a1e0e-a90f-4d76-b0c7-746d444e4cf2" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.545076 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="596a1e0e-a90f-4d76-b0c7-746d444e4cf2" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.545621 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.548024 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.548359 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.548620 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.548788 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.548811 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.564769 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv"] Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.600759 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcxkn\" (UniqueName: \"kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.601061 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.601176 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.601338 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.601477 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.703305 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.704217 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcxkn\" (UniqueName: \"kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.704331 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.704364 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.704752 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.712751 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.713673 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.713894 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.721134 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.722274 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcxkn\" (UniqueName: \"kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:23 crc kubenswrapper[4710]: I1009 09:37:23.863237 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:24 crc kubenswrapper[4710]: I1009 09:37:24.343052 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv"] Oct 09 09:37:24 crc kubenswrapper[4710]: I1009 09:37:24.345833 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:37:24 crc kubenswrapper[4710]: I1009 09:37:24.357707 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" event={"ID":"0394728a-d605-415c-afcf-5f52e6b3bcac","Type":"ContainerStarted","Data":"7af42a158abf25281e98d3ec5eabaae89017b3c74f879687148a175436ed94f2"} Oct 09 09:37:25 crc kubenswrapper[4710]: I1009 09:37:25.381020 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" event={"ID":"0394728a-d605-415c-afcf-5f52e6b3bcac","Type":"ContainerStarted","Data":"863b572ef57d5425869c5a9be5cbc28d2de144857df0fec5edd86c299cb470a2"} Oct 09 09:37:35 crc kubenswrapper[4710]: I1009 09:37:35.483180 4710 generic.go:334] "Generic (PLEG): container finished" podID="0394728a-d605-415c-afcf-5f52e6b3bcac" containerID="863b572ef57d5425869c5a9be5cbc28d2de144857df0fec5edd86c299cb470a2" exitCode=0 Oct 09 09:37:35 crc kubenswrapper[4710]: I1009 09:37:35.483226 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" event={"ID":"0394728a-d605-415c-afcf-5f52e6b3bcac","Type":"ContainerDied","Data":"863b572ef57d5425869c5a9be5cbc28d2de144857df0fec5edd86c299cb470a2"} Oct 09 09:37:35 crc kubenswrapper[4710]: I1009 09:37:35.546075 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:37:35 crc kubenswrapper[4710]: I1009 09:37:35.546141 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:37:36 crc kubenswrapper[4710]: I1009 09:37:36.888924 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.027303 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcxkn\" (UniqueName: \"kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn\") pod \"0394728a-d605-415c-afcf-5f52e6b3bcac\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.027840 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key\") pod \"0394728a-d605-415c-afcf-5f52e6b3bcac\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.028015 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle\") pod \"0394728a-d605-415c-afcf-5f52e6b3bcac\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.028141 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory\") pod \"0394728a-d605-415c-afcf-5f52e6b3bcac\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.028283 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph\") pod \"0394728a-d605-415c-afcf-5f52e6b3bcac\" (UID: \"0394728a-d605-415c-afcf-5f52e6b3bcac\") " Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.035731 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "0394728a-d605-415c-afcf-5f52e6b3bcac" (UID: "0394728a-d605-415c-afcf-5f52e6b3bcac"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.035795 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph" (OuterVolumeSpecName: "ceph") pod "0394728a-d605-415c-afcf-5f52e6b3bcac" (UID: "0394728a-d605-415c-afcf-5f52e6b3bcac"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.035939 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn" (OuterVolumeSpecName: "kube-api-access-lcxkn") pod "0394728a-d605-415c-afcf-5f52e6b3bcac" (UID: "0394728a-d605-415c-afcf-5f52e6b3bcac"). InnerVolumeSpecName "kube-api-access-lcxkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.052989 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0394728a-d605-415c-afcf-5f52e6b3bcac" (UID: "0394728a-d605-415c-afcf-5f52e6b3bcac"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.053403 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory" (OuterVolumeSpecName: "inventory") pod "0394728a-d605-415c-afcf-5f52e6b3bcac" (UID: "0394728a-d605-415c-afcf-5f52e6b3bcac"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.132270 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcxkn\" (UniqueName: \"kubernetes.io/projected/0394728a-d605-415c-afcf-5f52e6b3bcac-kube-api-access-lcxkn\") on node \"crc\" DevicePath \"\"" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.132302 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.132313 4710 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.132324 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.132336 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/0394728a-d605-415c-afcf-5f52e6b3bcac-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.502463 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" event={"ID":"0394728a-d605-415c-afcf-5f52e6b3bcac","Type":"ContainerDied","Data":"7af42a158abf25281e98d3ec5eabaae89017b3c74f879687148a175436ed94f2"} Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.502511 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7af42a158abf25281e98d3ec5eabaae89017b3c74f879687148a175436ed94f2" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.502782 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.600486 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc"] Oct 09 09:37:37 crc kubenswrapper[4710]: E1009 09:37:37.603809 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0394728a-d605-415c-afcf-5f52e6b3bcac" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.603852 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="0394728a-d605-415c-afcf-5f52e6b3bcac" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.604438 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="0394728a-d605-415c-afcf-5f52e6b3bcac" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.606163 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.610561 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc"] Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.618103 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.618346 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.618404 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.619151 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.619166 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.750811 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84b4z\" (UniqueName: \"kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.751126 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.751153 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " 
pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.751312 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.751359 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.855227 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.855264 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.855378 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.855407 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.856347 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84b4z\" (UniqueName: \"kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.864410 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 
09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.866472 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.866554 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.867528 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.873669 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84b4z\" (UniqueName: \"kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:37 crc kubenswrapper[4710]: I1009 09:37:37.933167 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:37:38 crc kubenswrapper[4710]: I1009 09:37:38.436375 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc"] Oct 09 09:37:38 crc kubenswrapper[4710]: I1009 09:37:38.511191 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" event={"ID":"1d4d6d90-9993-4a75-8ea7-e6d488a370b0","Type":"ContainerStarted","Data":"a88b909f144a2b4b5b3567b6566cba1087b7e18c3a0da2f615ea057df68a6f6f"} Oct 09 09:37:39 crc kubenswrapper[4710]: I1009 09:37:39.519666 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" event={"ID":"1d4d6d90-9993-4a75-8ea7-e6d488a370b0","Type":"ContainerStarted","Data":"1dee5933e367e4dd9c532a2efcebb9995d014db70b6f5305a795cce510b88371"} Oct 09 09:37:39 crc kubenswrapper[4710]: I1009 09:37:39.542289 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" podStartSLOduration=1.986353284 podStartE2EDuration="2.542267825s" podCreationTimestamp="2025-10-09 09:37:37 +0000 UTC" firstStartedPulling="2025-10-09 09:37:38.440206997 +0000 UTC m=+1981.930315393" lastFinishedPulling="2025-10-09 09:37:38.996121537 +0000 UTC m=+1982.486229934" observedRunningTime="2025-10-09 09:37:39.539125009 +0000 UTC m=+1983.029233407" watchObservedRunningTime="2025-10-09 09:37:39.542267825 +0000 UTC m=+1983.032376222" Oct 09 09:37:47 crc kubenswrapper[4710]: I1009 09:37:47.931332 4710 scope.go:117] "RemoveContainer" containerID="c5c086d34690eb855a9f566aa1a6e368fbf026251d79168eb86ed679427f546d" Oct 09 09:37:47 crc kubenswrapper[4710]: I1009 09:37:47.994067 4710 scope.go:117] "RemoveContainer" containerID="24d659da17c694c662eb7dcbaaba6824f80f10cded4476e8810864ce79e68c79" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.035531 4710 scope.go:117] "RemoveContainer" containerID="a611cb56e1fc4bec7f0ee1327c93ffdba9a2f7628e249194e841d50edc4cc94a" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.156329 4710 scope.go:117] "RemoveContainer" containerID="d58f396e91bf15782d1716f261a9a07e90bc7d346f46e005f26fc94c28382312" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.239206 4710 scope.go:117] "RemoveContainer" containerID="e3a4b9fa37e87ad8a55189d8ac640d746bbbc976f0762dd5b06569154a94d6e5" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.323746 4710 scope.go:117] "RemoveContainer" containerID="8b707def3b08b50f51b57f8ffa5e265df9110b30948cdd0c58f88c2283d4c46c" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.354360 4710 scope.go:117] "RemoveContainer" containerID="ddf0fb04012808f75eaffae1f03e60c0231b9dd7fde25bf11294b92cdc4b5184" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.375163 4710 scope.go:117] "RemoveContainer" containerID="2243d4256ac2afa7936aeeb2ef39b47da418830dac6c09a1a374dbe19ccc06a3" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.398905 4710 scope.go:117] "RemoveContainer" containerID="f362f78f15219982d6c9252adfe7e5fa3ff93433b0bd8f193416a426098612b6" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.448016 4710 scope.go:117] "RemoveContainer" containerID="e8fa7ea86cf30fecd8c06fd174461fa4cbf3c29c9c9829d70175551ccf5efb57" Oct 09 09:37:48 crc kubenswrapper[4710]: I1009 09:37:48.471597 4710 scope.go:117] "RemoveContainer" 
containerID="d403c8eeba60a023a48e371857ab63ec49fb33a231d30911942ea4b96b45025b" Oct 09 09:38:05 crc kubenswrapper[4710]: I1009 09:38:05.545859 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:38:05 crc kubenswrapper[4710]: I1009 09:38:05.546501 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:38:05 crc kubenswrapper[4710]: I1009 09:38:05.546563 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:38:05 crc kubenswrapper[4710]: I1009 09:38:05.741127 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:38:05 crc kubenswrapper[4710]: I1009 09:38:05.741337 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b" gracePeriod=600 Oct 09 09:38:06 crc kubenswrapper[4710]: I1009 09:38:06.750089 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b" exitCode=0 Oct 09 09:38:06 crc kubenswrapper[4710]: I1009 09:38:06.750139 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b"} Oct 09 09:38:06 crc kubenswrapper[4710]: I1009 09:38:06.751619 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2"} Oct 09 09:38:06 crc kubenswrapper[4710]: I1009 09:38:06.751661 4710 scope.go:117] "RemoveContainer" containerID="d6a135cb4e1542fea3ce7ee7c1822065650ed5f2b0b84fe2e70f9cde99e33179" Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.876444 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.879831 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.902032 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.959780 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.960166 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d57qb\" (UniqueName: \"kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:12 crc kubenswrapper[4710]: I1009 09:38:12.960261 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.061258 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.061422 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d57qb\" (UniqueName: \"kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.061500 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.061850 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.061899 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.081390 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d57qb\" (UniqueName: \"kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb\") pod \"redhat-marketplace-pzhvk\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.200986 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.621302 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.809806 4710 generic.go:334] "Generic (PLEG): container finished" podID="b38ca396-bd17-457b-a3c2-410aaba647de" containerID="5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9" exitCode=0 Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.809990 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerDied","Data":"5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9"} Oct 09 09:38:13 crc kubenswrapper[4710]: I1009 09:38:13.810109 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerStarted","Data":"2a337297b9936e2df24b6cda6a53c20927d927578d86eebad6aed6170e24ae41"} Oct 09 09:38:14 crc kubenswrapper[4710]: I1009 09:38:14.823752 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerStarted","Data":"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e"} Oct 09 09:38:15 crc kubenswrapper[4710]: I1009 09:38:15.830277 4710 generic.go:334] "Generic (PLEG): container finished" podID="b38ca396-bd17-457b-a3c2-410aaba647de" containerID="28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e" exitCode=0 Oct 09 09:38:15 crc kubenswrapper[4710]: I1009 09:38:15.830351 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerDied","Data":"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e"} Oct 09 09:38:16 crc kubenswrapper[4710]: I1009 09:38:16.847090 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerStarted","Data":"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0"} Oct 09 09:38:16 crc kubenswrapper[4710]: I1009 09:38:16.871484 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pzhvk" podStartSLOduration=2.196710381 podStartE2EDuration="4.871462303s" podCreationTimestamp="2025-10-09 09:38:12 +0000 UTC" firstStartedPulling="2025-10-09 09:38:13.812167198 +0000 UTC m=+2017.302275596" lastFinishedPulling="2025-10-09 09:38:16.486919121 +0000 UTC m=+2019.977027518" observedRunningTime="2025-10-09 09:38:16.864966213 +0000 UTC m=+2020.355074610" watchObservedRunningTime="2025-10-09 09:38:16.871462303 +0000 UTC m=+2020.361570700" Oct 09 09:38:23 crc kubenswrapper[4710]: I1009 09:38:23.201975 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:23 crc kubenswrapper[4710]: I1009 09:38:23.202395 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:23 crc kubenswrapper[4710]: I1009 09:38:23.235120 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:23 crc kubenswrapper[4710]: I1009 09:38:23.933099 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:23 crc kubenswrapper[4710]: I1009 09:38:23.968287 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:25 crc kubenswrapper[4710]: I1009 09:38:25.911752 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pzhvk" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="registry-server" containerID="cri-o://b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0" gracePeriod=2 Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.290150 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.405074 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities\") pod \"b38ca396-bd17-457b-a3c2-410aaba647de\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.405112 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d57qb\" (UniqueName: \"kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb\") pod \"b38ca396-bd17-457b-a3c2-410aaba647de\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.405282 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content\") pod \"b38ca396-bd17-457b-a3c2-410aaba647de\" (UID: \"b38ca396-bd17-457b-a3c2-410aaba647de\") " Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.406797 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities" (OuterVolumeSpecName: "utilities") pod "b38ca396-bd17-457b-a3c2-410aaba647de" (UID: "b38ca396-bd17-457b-a3c2-410aaba647de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.412564 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb" (OuterVolumeSpecName: "kube-api-access-d57qb") pod "b38ca396-bd17-457b-a3c2-410aaba647de" (UID: "b38ca396-bd17-457b-a3c2-410aaba647de"). InnerVolumeSpecName "kube-api-access-d57qb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.415050 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b38ca396-bd17-457b-a3c2-410aaba647de" (UID: "b38ca396-bd17-457b-a3c2-410aaba647de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.507295 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.507322 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b38ca396-bd17-457b-a3c2-410aaba647de-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.507331 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d57qb\" (UniqueName: \"kubernetes.io/projected/b38ca396-bd17-457b-a3c2-410aaba647de-kube-api-access-d57qb\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.919461 4710 generic.go:334] "Generic (PLEG): container finished" podID="b38ca396-bd17-457b-a3c2-410aaba647de" containerID="b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0" exitCode=0 Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.919497 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerDied","Data":"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0"} Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.919537 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pzhvk" event={"ID":"b38ca396-bd17-457b-a3c2-410aaba647de","Type":"ContainerDied","Data":"2a337297b9936e2df24b6cda6a53c20927d927578d86eebad6aed6170e24ae41"} Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.919556 4710 scope.go:117] "RemoveContainer" containerID="b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.919500 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pzhvk" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.937387 4710 scope.go:117] "RemoveContainer" containerID="28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.938466 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.950336 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pzhvk"] Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.970220 4710 scope.go:117] "RemoveContainer" containerID="5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.990871 4710 scope.go:117] "RemoveContainer" containerID="b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0" Oct 09 09:38:26 crc kubenswrapper[4710]: E1009 09:38:26.991353 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0\": container with ID starting with b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0 not found: ID does not exist" containerID="b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.991389 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0"} err="failed to get container status \"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0\": rpc error: code = NotFound desc = could not find container \"b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0\": container with ID starting with b15ec956d0bd545aa4917261ee527a66d981cc0b724e29394a151286406d9bd0 not found: ID does not exist" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.991413 4710 scope.go:117] "RemoveContainer" containerID="28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e" Oct 09 09:38:26 crc kubenswrapper[4710]: E1009 09:38:26.992162 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e\": container with ID starting with 28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e not found: ID does not exist" containerID="28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.992201 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e"} err="failed to get container status \"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e\": rpc error: code = NotFound desc = could not find container \"28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e\": container with ID starting with 28d4b961d4eb28a6a68e26a4908bb5f54e5a8d3f090952e283cdd5859016848e not found: ID does not exist" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.992227 4710 scope.go:117] "RemoveContainer" containerID="5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9" Oct 09 09:38:26 crc kubenswrapper[4710]: E1009 09:38:26.992542 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9\": container with ID starting with 5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9 not found: ID does not exist" containerID="5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9" Oct 09 09:38:26 crc kubenswrapper[4710]: I1009 09:38:26.992618 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9"} err="failed to get container status \"5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9\": rpc error: code = NotFound desc = could not find container \"5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9\": container with ID starting with 5c6c5a7e6fabd02618cc64425c55488c26fbf77cc57a4fdf5877ff24ebfb31a9 not found: ID does not exist" Oct 09 09:38:28 crc kubenswrapper[4710]: I1009 09:38:28.823028 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" path="/var/lib/kubelet/pods/b38ca396-bd17-457b-a3c2-410aaba647de/volumes" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.320959 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:43 crc kubenswrapper[4710]: E1009 09:38:43.322013 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="registry-server" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.322027 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="registry-server" Oct 09 09:38:43 crc kubenswrapper[4710]: E1009 09:38:43.322044 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="extract-content" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.322050 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="extract-content" Oct 09 09:38:43 crc kubenswrapper[4710]: E1009 09:38:43.322349 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="extract-utilities" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.322369 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="extract-utilities" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.322574 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b38ca396-bd17-457b-a3c2-410aaba647de" containerName="registry-server" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.324643 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.352583 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.390767 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsp2j\" (UniqueName: \"kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.391210 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.391269 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.493544 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.493585 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.493624 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsp2j\" (UniqueName: \"kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.494055 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.494229 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.522603 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rsp2j\" (UniqueName: \"kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j\") pod \"community-operators-bf649\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:43 crc kubenswrapper[4710]: I1009 09:38:43.662296 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:44 crc kubenswrapper[4710]: I1009 09:38:44.205029 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:45 crc kubenswrapper[4710]: I1009 09:38:45.048255 4710 generic.go:334] "Generic (PLEG): container finished" podID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerID="d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4" exitCode=0 Oct 09 09:38:45 crc kubenswrapper[4710]: I1009 09:38:45.048339 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerDied","Data":"d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4"} Oct 09 09:38:45 crc kubenswrapper[4710]: I1009 09:38:45.050187 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerStarted","Data":"73588a745e85040555e1a38a99934e4db29bdd2ba402afbb749abe8989f50b7e"} Oct 09 09:38:47 crc kubenswrapper[4710]: I1009 09:38:47.073575 4710 generic.go:334] "Generic (PLEG): container finished" podID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerID="615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5" exitCode=0 Oct 09 09:38:47 crc kubenswrapper[4710]: I1009 09:38:47.073677 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerDied","Data":"615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5"} Oct 09 09:38:48 crc kubenswrapper[4710]: I1009 09:38:48.087703 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerStarted","Data":"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d"} Oct 09 09:38:53 crc kubenswrapper[4710]: I1009 09:38:53.662904 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:53 crc kubenswrapper[4710]: I1009 09:38:53.663713 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:53 crc kubenswrapper[4710]: I1009 09:38:53.700067 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:53 crc kubenswrapper[4710]: I1009 09:38:53.718751 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bf649" podStartSLOduration=8.209548878 podStartE2EDuration="10.718731233s" podCreationTimestamp="2025-10-09 09:38:43 +0000 UTC" firstStartedPulling="2025-10-09 09:38:45.049844596 +0000 UTC m=+2048.539952994" lastFinishedPulling="2025-10-09 09:38:47.559026952 +0000 UTC m=+2051.049135349" observedRunningTime="2025-10-09 
09:38:48.117503084 +0000 UTC m=+2051.607611480" watchObservedRunningTime="2025-10-09 09:38:53.718731233 +0000 UTC m=+2057.208839630" Oct 09 09:38:54 crc kubenswrapper[4710]: I1009 09:38:54.161138 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:54 crc kubenswrapper[4710]: I1009 09:38:54.218272 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.142113 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bf649" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="registry-server" containerID="cri-o://c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d" gracePeriod=2 Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.504053 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.591801 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities\") pod \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.591959 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content\") pod \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.591987 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsp2j\" (UniqueName: \"kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j\") pod \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\" (UID: \"2d0cfb93-bad6-4a0a-9875-fe25ae24b878\") " Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.592483 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities" (OuterVolumeSpecName: "utilities") pod "2d0cfb93-bad6-4a0a-9875-fe25ae24b878" (UID: "2d0cfb93-bad6-4a0a-9875-fe25ae24b878"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.592867 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.596537 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j" (OuterVolumeSpecName: "kube-api-access-rsp2j") pod "2d0cfb93-bad6-4a0a-9875-fe25ae24b878" (UID: "2d0cfb93-bad6-4a0a-9875-fe25ae24b878"). InnerVolumeSpecName "kube-api-access-rsp2j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.630789 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d0cfb93-bad6-4a0a-9875-fe25ae24b878" (UID: "2d0cfb93-bad6-4a0a-9875-fe25ae24b878"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.695227 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:56 crc kubenswrapper[4710]: I1009 09:38:56.695269 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsp2j\" (UniqueName: \"kubernetes.io/projected/2d0cfb93-bad6-4a0a-9875-fe25ae24b878-kube-api-access-rsp2j\") on node \"crc\" DevicePath \"\"" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.150908 4710 generic.go:334] "Generic (PLEG): container finished" podID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerID="c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d" exitCode=0 Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.150951 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerDied","Data":"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d"} Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.150989 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bf649" event={"ID":"2d0cfb93-bad6-4a0a-9875-fe25ae24b878","Type":"ContainerDied","Data":"73588a745e85040555e1a38a99934e4db29bdd2ba402afbb749abe8989f50b7e"} Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.150990 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bf649" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.151011 4710 scope.go:117] "RemoveContainer" containerID="c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.169259 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.175684 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bf649"] Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.180605 4710 scope.go:117] "RemoveContainer" containerID="615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.199585 4710 scope.go:117] "RemoveContainer" containerID="d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.226860 4710 scope.go:117] "RemoveContainer" containerID="c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d" Oct 09 09:38:57 crc kubenswrapper[4710]: E1009 09:38:57.227247 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d\": container with ID starting with c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d not found: ID does not exist" containerID="c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.227280 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d"} err="failed to get container status \"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d\": rpc error: code = NotFound desc = could not find container \"c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d\": container with ID starting with c6eb20fc36ae254fac0d5168cb83e2fd865b86d1c7775d3149c67320ae98699d not found: ID does not exist" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.227305 4710 scope.go:117] "RemoveContainer" containerID="615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5" Oct 09 09:38:57 crc kubenswrapper[4710]: E1009 09:38:57.227753 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5\": container with ID starting with 615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5 not found: ID does not exist" containerID="615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.227864 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5"} err="failed to get container status \"615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5\": rpc error: code = NotFound desc = could not find container \"615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5\": container with ID starting with 615a99a6ada38544127708474df7820e21f663eb5ebb7a430e922014187e0ff5 not found: ID does not exist" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.227947 4710 scope.go:117] "RemoveContainer" 
containerID="d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4" Oct 09 09:38:57 crc kubenswrapper[4710]: E1009 09:38:57.228266 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4\": container with ID starting with d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4 not found: ID does not exist" containerID="d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4" Oct 09 09:38:57 crc kubenswrapper[4710]: I1009 09:38:57.228344 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4"} err="failed to get container status \"d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4\": rpc error: code = NotFound desc = could not find container \"d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4\": container with ID starting with d721f4e794927c0648edbc3473935f0495e88b63498b948453c80cfc925db2f4 not found: ID does not exist" Oct 09 09:38:58 crc kubenswrapper[4710]: I1009 09:38:58.823663 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" path="/var/lib/kubelet/pods/2d0cfb93-bad6-4a0a-9875-fe25ae24b878/volumes" Oct 09 09:39:24 crc kubenswrapper[4710]: I1009 09:39:24.330723 4710 generic.go:334] "Generic (PLEG): container finished" podID="1d4d6d90-9993-4a75-8ea7-e6d488a370b0" containerID="1dee5933e367e4dd9c532a2efcebb9995d014db70b6f5305a795cce510b88371" exitCode=0 Oct 09 09:39:24 crc kubenswrapper[4710]: I1009 09:39:24.330810 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" event={"ID":"1d4d6d90-9993-4a75-8ea7-e6d488a370b0","Type":"ContainerDied","Data":"1dee5933e367e4dd9c532a2efcebb9995d014db70b6f5305a795cce510b88371"} Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.641495 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.793497 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle\") pod \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.793879 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key\") pod \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.794026 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph\") pod \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.794129 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84b4z\" (UniqueName: \"kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z\") pod \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.794294 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory\") pod \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\" (UID: \"1d4d6d90-9993-4a75-8ea7-e6d488a370b0\") " Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.799315 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z" (OuterVolumeSpecName: "kube-api-access-84b4z") pod "1d4d6d90-9993-4a75-8ea7-e6d488a370b0" (UID: "1d4d6d90-9993-4a75-8ea7-e6d488a370b0"). InnerVolumeSpecName "kube-api-access-84b4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.799753 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph" (OuterVolumeSpecName: "ceph") pod "1d4d6d90-9993-4a75-8ea7-e6d488a370b0" (UID: "1d4d6d90-9993-4a75-8ea7-e6d488a370b0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.800166 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1d4d6d90-9993-4a75-8ea7-e6d488a370b0" (UID: "1d4d6d90-9993-4a75-8ea7-e6d488a370b0"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.815888 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1d4d6d90-9993-4a75-8ea7-e6d488a370b0" (UID: "1d4d6d90-9993-4a75-8ea7-e6d488a370b0"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.816563 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory" (OuterVolumeSpecName: "inventory") pod "1d4d6d90-9993-4a75-8ea7-e6d488a370b0" (UID: "1d4d6d90-9993-4a75-8ea7-e6d488a370b0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.896262 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.896289 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.896299 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84b4z\" (UniqueName: \"kubernetes.io/projected/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-kube-api-access-84b4z\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.896309 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:25 crc kubenswrapper[4710]: I1009 09:39:25.896317 4710 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d4d6d90-9993-4a75-8ea7-e6d488a370b0-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.345014 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" event={"ID":"1d4d6d90-9993-4a75-8ea7-e6d488a370b0","Type":"ContainerDied","Data":"a88b909f144a2b4b5b3567b6566cba1087b7e18c3a0da2f615ea057df68a6f6f"} Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.345049 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a88b909f144a2b4b5b3567b6566cba1087b7e18c3a0da2f615ea057df68a6f6f" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.345051 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.413865 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p"] Oct 09 09:39:26 crc kubenswrapper[4710]: E1009 09:39:26.414237 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="extract-content" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414254 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="extract-content" Oct 09 09:39:26 crc kubenswrapper[4710]: E1009 09:39:26.414267 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="extract-utilities" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414273 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="extract-utilities" Oct 09 09:39:26 crc kubenswrapper[4710]: E1009 09:39:26.414283 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4d6d90-9993-4a75-8ea7-e6d488a370b0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414289 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4d6d90-9993-4a75-8ea7-e6d488a370b0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:26 crc kubenswrapper[4710]: E1009 09:39:26.414313 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="registry-server" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414318 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="registry-server" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414486 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d0cfb93-bad6-4a0a-9875-fe25ae24b878" containerName="registry-server" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.414506 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d4d6d90-9993-4a75-8ea7-e6d488a370b0" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.415056 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.416792 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.422174 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.422377 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.422628 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.422791 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.427591 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p"] Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.510652 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.510715 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhntp\" (UniqueName: \"kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.510776 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.510915 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.612460 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.612660 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-qhntp\" (UniqueName: \"kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.612774 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.612917 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.616561 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.616714 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.616886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.629570 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhntp\" (UniqueName: \"kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:26 crc kubenswrapper[4710]: I1009 09:39:26.730619 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:27 crc kubenswrapper[4710]: I1009 09:39:27.176559 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p"] Oct 09 09:39:27 crc kubenswrapper[4710]: I1009 09:39:27.358836 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" event={"ID":"fadd281b-1db6-4170-8ddd-12e4b65a8e5a","Type":"ContainerStarted","Data":"84b8fb6e0a9d3aa44ae54c98ea2cc202538cde87b1adbaeb36af56f555ae2cdb"} Oct 09 09:39:28 crc kubenswrapper[4710]: I1009 09:39:28.377402 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" event={"ID":"fadd281b-1db6-4170-8ddd-12e4b65a8e5a","Type":"ContainerStarted","Data":"58d815bb80db0d23f98d08ee911bbfa46ee1a6a695c7844505a549eca856b3b3"} Oct 09 09:39:28 crc kubenswrapper[4710]: I1009 09:39:28.398355 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" podStartSLOduration=1.8514379509999999 podStartE2EDuration="2.39833745s" podCreationTimestamp="2025-10-09 09:39:26 +0000 UTC" firstStartedPulling="2025-10-09 09:39:27.183680619 +0000 UTC m=+2090.673789016" lastFinishedPulling="2025-10-09 09:39:27.730580118 +0000 UTC m=+2091.220688515" observedRunningTime="2025-10-09 09:39:28.393207957 +0000 UTC m=+2091.883316353" watchObservedRunningTime="2025-10-09 09:39:28.39833745 +0000 UTC m=+2091.888445847" Oct 09 09:39:47 crc kubenswrapper[4710]: I1009 09:39:47.527647 4710 generic.go:334] "Generic (PLEG): container finished" podID="fadd281b-1db6-4170-8ddd-12e4b65a8e5a" containerID="58d815bb80db0d23f98d08ee911bbfa46ee1a6a695c7844505a549eca856b3b3" exitCode=0 Oct 09 09:39:47 crc kubenswrapper[4710]: I1009 09:39:47.527744 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" event={"ID":"fadd281b-1db6-4170-8ddd-12e4b65a8e5a","Type":"ContainerDied","Data":"58d815bb80db0d23f98d08ee911bbfa46ee1a6a695c7844505a549eca856b3b3"} Oct 09 09:39:48 crc kubenswrapper[4710]: I1009 09:39:48.873769 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.052754 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory\") pod \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.052812 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhntp\" (UniqueName: \"kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp\") pod \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.052914 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key\") pod \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.052965 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph\") pod \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\" (UID: \"fadd281b-1db6-4170-8ddd-12e4b65a8e5a\") " Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.058672 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp" (OuterVolumeSpecName: "kube-api-access-qhntp") pod "fadd281b-1db6-4170-8ddd-12e4b65a8e5a" (UID: "fadd281b-1db6-4170-8ddd-12e4b65a8e5a"). InnerVolumeSpecName "kube-api-access-qhntp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.058880 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph" (OuterVolumeSpecName: "ceph") pod "fadd281b-1db6-4170-8ddd-12e4b65a8e5a" (UID: "fadd281b-1db6-4170-8ddd-12e4b65a8e5a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.074597 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory" (OuterVolumeSpecName: "inventory") pod "fadd281b-1db6-4170-8ddd-12e4b65a8e5a" (UID: "fadd281b-1db6-4170-8ddd-12e4b65a8e5a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.075572 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fadd281b-1db6-4170-8ddd-12e4b65a8e5a" (UID: "fadd281b-1db6-4170-8ddd-12e4b65a8e5a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.155221 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.155255 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhntp\" (UniqueName: \"kubernetes.io/projected/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-kube-api-access-qhntp\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.155265 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.155273 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fadd281b-1db6-4170-8ddd-12e4b65a8e5a-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.544944 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" event={"ID":"fadd281b-1db6-4170-8ddd-12e4b65a8e5a","Type":"ContainerDied","Data":"84b8fb6e0a9d3aa44ae54c98ea2cc202538cde87b1adbaeb36af56f555ae2cdb"} Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.545004 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b8fb6e0a9d3aa44ae54c98ea2cc202538cde87b1adbaeb36af56f555ae2cdb" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.545083 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.622094 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc"] Oct 09 09:39:49 crc kubenswrapper[4710]: E1009 09:39:49.622406 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fadd281b-1db6-4170-8ddd-12e4b65a8e5a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.622439 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="fadd281b-1db6-4170-8ddd-12e4b65a8e5a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.622598 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="fadd281b-1db6-4170-8ddd-12e4b65a8e5a" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.623181 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.629367 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.629672 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.629805 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.629932 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.630060 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.640094 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc"] Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.765240 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.765552 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndfcm\" (UniqueName: \"kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.765741 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.765897 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.867828 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndfcm\" (UniqueName: \"kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.868088 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.868136 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.868184 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.873089 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.873409 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.882283 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.883415 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndfcm\" (UniqueName: \"kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:49 crc kubenswrapper[4710]: I1009 09:39:49.938674 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:50 crc kubenswrapper[4710]: I1009 09:39:50.378582 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc"] Oct 09 09:39:50 crc kubenswrapper[4710]: W1009 09:39:50.382546 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08cf9289_9c53_4831_9bf9_3e0b70a457d5.slice/crio-bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5 WatchSource:0}: Error finding container bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5: Status 404 returned error can't find the container with id bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5 Oct 09 09:39:50 crc kubenswrapper[4710]: I1009 09:39:50.552309 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" event={"ID":"08cf9289-9c53-4831-9bf9-3e0b70a457d5","Type":"ContainerStarted","Data":"bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5"} Oct 09 09:39:51 crc kubenswrapper[4710]: I1009 09:39:51.560757 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" event={"ID":"08cf9289-9c53-4831-9bf9-3e0b70a457d5","Type":"ContainerStarted","Data":"8f8b74d47868be32db9d55ee3ad6bcf408a70fc8ce11f87d61a88054f76fa132"} Oct 09 09:39:55 crc kubenswrapper[4710]: I1009 09:39:55.590872 4710 generic.go:334] "Generic (PLEG): container finished" podID="08cf9289-9c53-4831-9bf9-3e0b70a457d5" containerID="8f8b74d47868be32db9d55ee3ad6bcf408a70fc8ce11f87d61a88054f76fa132" exitCode=0 Oct 09 09:39:55 crc kubenswrapper[4710]: I1009 09:39:55.590953 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" event={"ID":"08cf9289-9c53-4831-9bf9-3e0b70a457d5","Type":"ContainerDied","Data":"8f8b74d47868be32db9d55ee3ad6bcf408a70fc8ce11f87d61a88054f76fa132"} Oct 09 09:39:56 crc kubenswrapper[4710]: I1009 09:39:56.977320 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.023550 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndfcm\" (UniqueName: \"kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm\") pod \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.023612 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") pod \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.023629 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory\") pod \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.023700 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph\") pod \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.030328 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph" (OuterVolumeSpecName: "ceph") pod "08cf9289-9c53-4831-9bf9-3e0b70a457d5" (UID: "08cf9289-9c53-4831-9bf9-3e0b70a457d5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.031071 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm" (OuterVolumeSpecName: "kube-api-access-ndfcm") pod "08cf9289-9c53-4831-9bf9-3e0b70a457d5" (UID: "08cf9289-9c53-4831-9bf9-3e0b70a457d5"). InnerVolumeSpecName "kube-api-access-ndfcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:39:57 crc kubenswrapper[4710]: E1009 09:39:57.045384 4710 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key podName:08cf9289-9c53-4831-9bf9-3e0b70a457d5 nodeName:}" failed. No retries permitted until 2025-10-09 09:39:57.54535157 +0000 UTC m=+2121.035459967 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key") pod "08cf9289-9c53-4831-9bf9-3e0b70a457d5" (UID: "08cf9289-9c53-4831-9bf9-3e0b70a457d5") : error deleting /var/lib/kubelet/pods/08cf9289-9c53-4831-9bf9-3e0b70a457d5/volume-subpaths: remove /var/lib/kubelet/pods/08cf9289-9c53-4831-9bf9-3e0b70a457d5/volume-subpaths: no such file or directory Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.047835 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory" (OuterVolumeSpecName: "inventory") pod "08cf9289-9c53-4831-9bf9-3e0b70a457d5" (UID: "08cf9289-9c53-4831-9bf9-3e0b70a457d5"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.125324 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndfcm\" (UniqueName: \"kubernetes.io/projected/08cf9289-9c53-4831-9bf9-3e0b70a457d5-kube-api-access-ndfcm\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.125355 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.125363 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.608294 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" event={"ID":"08cf9289-9c53-4831-9bf9-3e0b70a457d5","Type":"ContainerDied","Data":"bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5"} Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.608636 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bae6ed5f6d821e79a07eb09b5bb1c9fd774ccfc4ad963a78fdec882736ae79d5" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.608351 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.633154 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") pod \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\" (UID: \"08cf9289-9c53-4831-9bf9-3e0b70a457d5\") " Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.636597 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "08cf9289-9c53-4831-9bf9-3e0b70a457d5" (UID: "08cf9289-9c53-4831-9bf9-3e0b70a457d5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.709479 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh"] Oct 09 09:39:57 crc kubenswrapper[4710]: E1009 09:39:57.709959 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08cf9289-9c53-4831-9bf9-3e0b70a457d5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.709984 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="08cf9289-9c53-4831-9bf9-3e0b70a457d5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.710251 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="08cf9289-9c53-4831-9bf9-3e0b70a457d5" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.711905 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.720582 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh"] Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.735971 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.736046 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj44f\" (UniqueName: \"kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.736088 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.736170 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.736224 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/08cf9289-9c53-4831-9bf9-3e0b70a457d5-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.837367 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.837457 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.837586 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj44f\" (UniqueName: \"kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " 
pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.837666 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.842206 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.842267 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.842924 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:57 crc kubenswrapper[4710]: I1009 09:39:57.853573 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj44f\" (UniqueName: \"kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-wtmjh\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:58 crc kubenswrapper[4710]: I1009 09:39:58.028608 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:39:58 crc kubenswrapper[4710]: I1009 09:39:58.480597 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh"] Oct 09 09:39:58 crc kubenswrapper[4710]: I1009 09:39:58.616036 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" event={"ID":"95b718d5-c979-4a2f-82a5-e0915b769b7a","Type":"ContainerStarted","Data":"9d4fd0d581b7df35b670dfe317920a6c94ef0a62aee3905b3c32e7f7af489ae3"} Oct 09 09:39:59 crc kubenswrapper[4710]: I1009 09:39:59.626531 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" event={"ID":"95b718d5-c979-4a2f-82a5-e0915b769b7a","Type":"ContainerStarted","Data":"0ab90eb92b74f9708e22393a6c46d0c871ac629cd2a3739b0da2e1ff1261dd10"} Oct 09 09:39:59 crc kubenswrapper[4710]: I1009 09:39:59.657139 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" podStartSLOduration=2.051771738 podStartE2EDuration="2.657085275s" podCreationTimestamp="2025-10-09 09:39:57 +0000 UTC" firstStartedPulling="2025-10-09 09:39:58.491935974 +0000 UTC m=+2121.982044371" lastFinishedPulling="2025-10-09 09:39:59.09724951 +0000 UTC m=+2122.587357908" observedRunningTime="2025-10-09 09:39:59.640952712 +0000 UTC m=+2123.131061109" watchObservedRunningTime="2025-10-09 09:39:59.657085275 +0000 UTC m=+2123.147193672" Oct 09 09:40:29 crc kubenswrapper[4710]: I1009 09:40:29.871861 4710 generic.go:334] "Generic (PLEG): container finished" podID="95b718d5-c979-4a2f-82a5-e0915b769b7a" containerID="0ab90eb92b74f9708e22393a6c46d0c871ac629cd2a3739b0da2e1ff1261dd10" exitCode=0 Oct 09 09:40:29 crc kubenswrapper[4710]: I1009 09:40:29.872038 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" event={"ID":"95b718d5-c979-4a2f-82a5-e0915b769b7a","Type":"ContainerDied","Data":"0ab90eb92b74f9708e22393a6c46d0c871ac629cd2a3739b0da2e1ff1261dd10"} Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.258298 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.350951 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory\") pod \"95b718d5-c979-4a2f-82a5-e0915b769b7a\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.351133 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wj44f\" (UniqueName: \"kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f\") pod \"95b718d5-c979-4a2f-82a5-e0915b769b7a\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.351158 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph\") pod \"95b718d5-c979-4a2f-82a5-e0915b769b7a\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.351195 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key\") pod \"95b718d5-c979-4a2f-82a5-e0915b769b7a\" (UID: \"95b718d5-c979-4a2f-82a5-e0915b769b7a\") " Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.356709 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f" (OuterVolumeSpecName: "kube-api-access-wj44f") pod "95b718d5-c979-4a2f-82a5-e0915b769b7a" (UID: "95b718d5-c979-4a2f-82a5-e0915b769b7a"). InnerVolumeSpecName "kube-api-access-wj44f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.356862 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph" (OuterVolumeSpecName: "ceph") pod "95b718d5-c979-4a2f-82a5-e0915b769b7a" (UID: "95b718d5-c979-4a2f-82a5-e0915b769b7a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.372155 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "95b718d5-c979-4a2f-82a5-e0915b769b7a" (UID: "95b718d5-c979-4a2f-82a5-e0915b769b7a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.376094 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory" (OuterVolumeSpecName: "inventory") pod "95b718d5-c979-4a2f-82a5-e0915b769b7a" (UID: "95b718d5-c979-4a2f-82a5-e0915b769b7a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.453279 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.453311 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wj44f\" (UniqueName: \"kubernetes.io/projected/95b718d5-c979-4a2f-82a5-e0915b769b7a-kube-api-access-wj44f\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.453324 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.453331 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/95b718d5-c979-4a2f-82a5-e0915b769b7a-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.889835 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" event={"ID":"95b718d5-c979-4a2f-82a5-e0915b769b7a","Type":"ContainerDied","Data":"9d4fd0d581b7df35b670dfe317920a6c94ef0a62aee3905b3c32e7f7af489ae3"} Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.889877 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d4fd0d581b7df35b670dfe317920a6c94ef0a62aee3905b3c32e7f7af489ae3" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.890081 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-wtmjh" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.976981 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk"] Oct 09 09:40:31 crc kubenswrapper[4710]: E1009 09:40:31.977650 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b718d5-c979-4a2f-82a5-e0915b769b7a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.977671 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b718d5-c979-4a2f-82a5-e0915b769b7a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.977876 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="95b718d5-c979-4a2f-82a5-e0915b769b7a" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.978574 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.981926 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.982087 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.982165 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.982250 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.982255 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:40:31 crc kubenswrapper[4710]: I1009 09:40:31.988546 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk"] Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.065559 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.065770 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.065809 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sbjs\" (UniqueName: \"kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.065976 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.167332 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.167378 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sbjs\" 
(UniqueName: \"kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.167472 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.167489 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.172032 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.172064 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.172650 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.184482 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sbjs\" (UniqueName: \"kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.293962 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.804228 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk"] Oct 09 09:40:32 crc kubenswrapper[4710]: I1009 09:40:32.899837 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" event={"ID":"f25f8196-f0c2-4299-8488-0538f69a70a0","Type":"ContainerStarted","Data":"66d6be0182699c98ff19a27340abc30722e7f25e7dc875e125ae2b4f0f972ed1"} Oct 09 09:40:33 crc kubenswrapper[4710]: I1009 09:40:33.909712 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" event={"ID":"f25f8196-f0c2-4299-8488-0538f69a70a0","Type":"ContainerStarted","Data":"0614272a7ad00e542b5a4df184dbdc6765394fbce61e479354236d474c4d71d0"} Oct 09 09:40:33 crc kubenswrapper[4710]: I1009 09:40:33.932048 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" podStartSLOduration=2.31240206 podStartE2EDuration="2.932028151s" podCreationTimestamp="2025-10-09 09:40:31 +0000 UTC" firstStartedPulling="2025-10-09 09:40:32.797782367 +0000 UTC m=+2156.287890765" lastFinishedPulling="2025-10-09 09:40:33.417408459 +0000 UTC m=+2156.907516856" observedRunningTime="2025-10-09 09:40:33.930282749 +0000 UTC m=+2157.420391166" watchObservedRunningTime="2025-10-09 09:40:33.932028151 +0000 UTC m=+2157.422136548" Oct 09 09:40:35 crc kubenswrapper[4710]: I1009 09:40:35.545809 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:40:35 crc kubenswrapper[4710]: I1009 09:40:35.546163 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:40:36 crc kubenswrapper[4710]: I1009 09:40:36.931155 4710 generic.go:334] "Generic (PLEG): container finished" podID="f25f8196-f0c2-4299-8488-0538f69a70a0" containerID="0614272a7ad00e542b5a4df184dbdc6765394fbce61e479354236d474c4d71d0" exitCode=0 Oct 09 09:40:36 crc kubenswrapper[4710]: I1009 09:40:36.931193 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" event={"ID":"f25f8196-f0c2-4299-8488-0538f69a70a0","Type":"ContainerDied","Data":"0614272a7ad00e542b5a4df184dbdc6765394fbce61e479354236d474c4d71d0"} Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.241862 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.392845 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sbjs\" (UniqueName: \"kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs\") pod \"f25f8196-f0c2-4299-8488-0538f69a70a0\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.392964 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph\") pod \"f25f8196-f0c2-4299-8488-0538f69a70a0\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.393151 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory\") pod \"f25f8196-f0c2-4299-8488-0538f69a70a0\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.393192 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key\") pod \"f25f8196-f0c2-4299-8488-0538f69a70a0\" (UID: \"f25f8196-f0c2-4299-8488-0538f69a70a0\") " Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.398156 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph" (OuterVolumeSpecName: "ceph") pod "f25f8196-f0c2-4299-8488-0538f69a70a0" (UID: "f25f8196-f0c2-4299-8488-0538f69a70a0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.400466 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs" (OuterVolumeSpecName: "kube-api-access-9sbjs") pod "f25f8196-f0c2-4299-8488-0538f69a70a0" (UID: "f25f8196-f0c2-4299-8488-0538f69a70a0"). InnerVolumeSpecName "kube-api-access-9sbjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.413950 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f25f8196-f0c2-4299-8488-0538f69a70a0" (UID: "f25f8196-f0c2-4299-8488-0538f69a70a0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.415292 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory" (OuterVolumeSpecName: "inventory") pod "f25f8196-f0c2-4299-8488-0538f69a70a0" (UID: "f25f8196-f0c2-4299-8488-0538f69a70a0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.494726 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sbjs\" (UniqueName: \"kubernetes.io/projected/f25f8196-f0c2-4299-8488-0538f69a70a0-kube-api-access-9sbjs\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.494753 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.494765 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.494773 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f25f8196-f0c2-4299-8488-0538f69a70a0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.953218 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" event={"ID":"f25f8196-f0c2-4299-8488-0538f69a70a0","Type":"ContainerDied","Data":"66d6be0182699c98ff19a27340abc30722e7f25e7dc875e125ae2b4f0f972ed1"} Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.953264 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66d6be0182699c98ff19a27340abc30722e7f25e7dc875e125ae2b4f0f972ed1" Oct 09 09:40:38 crc kubenswrapper[4710]: I1009 09:40:38.953263 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.010943 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb"] Oct 09 09:40:39 crc kubenswrapper[4710]: E1009 09:40:39.011300 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f25f8196-f0c2-4299-8488-0538f69a70a0" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.011321 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f25f8196-f0c2-4299-8488-0538f69a70a0" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.011525 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f25f8196-f0c2-4299-8488-0538f69a70a0" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.012162 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.016886 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.017021 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.017081 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.017178 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.017380 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.024600 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb"] Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.104395 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.104476 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.104527 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.104621 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4brs9\" (UniqueName: \"kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.206266 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.206320 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.206356 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.206421 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4brs9\" (UniqueName: \"kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.210541 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.210871 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.214268 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.220822 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4brs9\" (UniqueName: \"kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.328230 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.780132 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb"] Oct 09 09:40:39 crc kubenswrapper[4710]: I1009 09:40:39.961958 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" event={"ID":"5a884cb4-f253-4c96-9e29-5e60aff6f144","Type":"ContainerStarted","Data":"d00bd129ee7516619bb7e340128f356b5105c6a697e7072bd496b38adada5150"} Oct 09 09:40:40 crc kubenswrapper[4710]: I1009 09:40:40.970580 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" event={"ID":"5a884cb4-f253-4c96-9e29-5e60aff6f144","Type":"ContainerStarted","Data":"63e182480d2bbe7a9cb58bcd6da9af8a8a0316fab2709007a2b6c758732238de"} Oct 09 09:40:40 crc kubenswrapper[4710]: I1009 09:40:40.989319 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" podStartSLOduration=2.437837635 podStartE2EDuration="2.989294709s" podCreationTimestamp="2025-10-09 09:40:38 +0000 UTC" firstStartedPulling="2025-10-09 09:40:39.790408242 +0000 UTC m=+2163.280516638" lastFinishedPulling="2025-10-09 09:40:40.341865315 +0000 UTC m=+2163.831973712" observedRunningTime="2025-10-09 09:40:40.982171977 +0000 UTC m=+2164.472280374" watchObservedRunningTime="2025-10-09 09:40:40.989294709 +0000 UTC m=+2164.479403106" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.454158 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.456074 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.465562 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.550944 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.551032 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.551064 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5gtn\" (UniqueName: \"kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.652797 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.652848 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5gtn\" (UniqueName: \"kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.652951 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.653361 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.653602 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.669218 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-n5gtn\" (UniqueName: \"kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn\") pod \"redhat-operators-lbqqk\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:41 crc kubenswrapper[4710]: I1009 09:40:41.773465 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.201998 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.849401 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.851397 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.909570 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.979783 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.980019 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97mxk\" (UniqueName: \"kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.980130 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.986123 4710 generic.go:334] "Generic (PLEG): container finished" podID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerID="90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56" exitCode=0 Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.986162 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerDied","Data":"90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56"} Oct 09 09:40:42 crc kubenswrapper[4710]: I1009 09:40:42.986194 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerStarted","Data":"ac20441a9480ef045773fd2dcbd5071ae494636ba61468188673d1e3507cb709"} Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.082132 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97mxk\" (UniqueName: 
\"kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.082214 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.082323 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.082681 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.082734 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.115614 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97mxk\" (UniqueName: \"kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk\") pod \"certified-operators-lbg42\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.171717 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.648129 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.994372 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerStarted","Data":"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984"} Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.996813 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerID="6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf" exitCode=0 Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.996859 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerDied","Data":"6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf"} Oct 09 09:40:43 crc kubenswrapper[4710]: I1009 09:40:43.996891 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerStarted","Data":"a9014011f7fac3d49af95937e0226f7f2b77c0c26e8f7fa8deb1b90978fd7acd"} Oct 09 09:40:45 crc kubenswrapper[4710]: I1009 09:40:45.006422 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerStarted","Data":"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb"} Oct 09 09:40:47 crc kubenswrapper[4710]: I1009 09:40:47.027397 4710 generic.go:334] "Generic (PLEG): container finished" podID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerID="828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984" exitCode=0 Oct 09 09:40:47 crc kubenswrapper[4710]: I1009 09:40:47.027758 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerDied","Data":"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984"} Oct 09 09:40:47 crc kubenswrapper[4710]: I1009 09:40:47.032379 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerID="d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb" exitCode=0 Oct 09 09:40:47 crc kubenswrapper[4710]: I1009 09:40:47.032414 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerDied","Data":"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb"} Oct 09 09:40:48 crc kubenswrapper[4710]: I1009 09:40:48.045859 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerStarted","Data":"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6"} Oct 09 09:40:48 crc kubenswrapper[4710]: I1009 09:40:48.048804 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" 
event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerStarted","Data":"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4"} Oct 09 09:40:48 crc kubenswrapper[4710]: I1009 09:40:48.069127 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lbg42" podStartSLOduration=2.436655765 podStartE2EDuration="6.069104053s" podCreationTimestamp="2025-10-09 09:40:42 +0000 UTC" firstStartedPulling="2025-10-09 09:40:43.997975916 +0000 UTC m=+2167.488084313" lastFinishedPulling="2025-10-09 09:40:47.630424204 +0000 UTC m=+2171.120532601" observedRunningTime="2025-10-09 09:40:48.068391909 +0000 UTC m=+2171.558500307" watchObservedRunningTime="2025-10-09 09:40:48.069104053 +0000 UTC m=+2171.559212450" Oct 09 09:40:48 crc kubenswrapper[4710]: I1009 09:40:48.087078 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lbqqk" podStartSLOduration=2.375753307 podStartE2EDuration="7.0870364s" podCreationTimestamp="2025-10-09 09:40:41 +0000 UTC" firstStartedPulling="2025-10-09 09:40:42.988042923 +0000 UTC m=+2166.478151319" lastFinishedPulling="2025-10-09 09:40:47.699326015 +0000 UTC m=+2171.189434412" observedRunningTime="2025-10-09 09:40:48.084253231 +0000 UTC m=+2171.574361629" watchObservedRunningTime="2025-10-09 09:40:48.0870364 +0000 UTC m=+2171.577144797" Oct 09 09:40:51 crc kubenswrapper[4710]: I1009 09:40:51.773686 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:51 crc kubenswrapper[4710]: I1009 09:40:51.775587 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:40:52 crc kubenswrapper[4710]: I1009 09:40:52.815328 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lbqqk" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="registry-server" probeResult="failure" output=< Oct 09 09:40:52 crc kubenswrapper[4710]: timeout: failed to connect service ":50051" within 1s Oct 09 09:40:52 crc kubenswrapper[4710]: > Oct 09 09:40:53 crc kubenswrapper[4710]: I1009 09:40:53.172867 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:53 crc kubenswrapper[4710]: I1009 09:40:53.173258 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:53 crc kubenswrapper[4710]: I1009 09:40:53.217538 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:54 crc kubenswrapper[4710]: I1009 09:40:54.161750 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:54 crc kubenswrapper[4710]: I1009 09:40:54.443707 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.144165 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lbg42" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="registry-server" containerID="cri-o://c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6" gracePeriod=2 Oct 09 09:40:56 crc kubenswrapper[4710]: 
I1009 09:40:56.618357 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.789921 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content\") pod \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.790216 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities\") pod \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.790259 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97mxk\" (UniqueName: \"kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk\") pod \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\" (UID: \"e5d2570d-bd07-4bfe-b165-f5747b6d7854\") " Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.791110 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities" (OuterVolumeSpecName: "utilities") pod "e5d2570d-bd07-4bfe-b165-f5747b6d7854" (UID: "e5d2570d-bd07-4bfe-b165-f5747b6d7854"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.801782 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk" (OuterVolumeSpecName: "kube-api-access-97mxk") pod "e5d2570d-bd07-4bfe-b165-f5747b6d7854" (UID: "e5d2570d-bd07-4bfe-b165-f5747b6d7854"). InnerVolumeSpecName "kube-api-access-97mxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.833714 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e5d2570d-bd07-4bfe-b165-f5747b6d7854" (UID: "e5d2570d-bd07-4bfe-b165-f5747b6d7854"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.893101 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.893139 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97mxk\" (UniqueName: \"kubernetes.io/projected/e5d2570d-bd07-4bfe-b165-f5747b6d7854-kube-api-access-97mxk\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:56 crc kubenswrapper[4710]: I1009 09:40:56.893152 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e5d2570d-bd07-4bfe-b165-f5747b6d7854-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.155613 4710 generic.go:334] "Generic (PLEG): container finished" podID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerID="c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6" exitCode=0 Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.155681 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerDied","Data":"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6"} Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.155722 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lbg42" event={"ID":"e5d2570d-bd07-4bfe-b165-f5747b6d7854","Type":"ContainerDied","Data":"a9014011f7fac3d49af95937e0226f7f2b77c0c26e8f7fa8deb1b90978fd7acd"} Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.155744 4710 scope.go:117] "RemoveContainer" containerID="c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.155950 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lbg42" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.179369 4710 scope.go:117] "RemoveContainer" containerID="d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.201683 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.216141 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lbg42"] Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.217167 4710 scope.go:117] "RemoveContainer" containerID="6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.243900 4710 scope.go:117] "RemoveContainer" containerID="c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6" Oct 09 09:40:57 crc kubenswrapper[4710]: E1009 09:40:57.244462 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6\": container with ID starting with c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6 not found: ID does not exist" containerID="c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.244520 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6"} err="failed to get container status \"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6\": rpc error: code = NotFound desc = could not find container \"c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6\": container with ID starting with c8820ebbfab5fadaf53f602c44a755625f9825a0e5cb7d88b073352435d76dd6 not found: ID does not exist" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.244548 4710 scope.go:117] "RemoveContainer" containerID="d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb" Oct 09 09:40:57 crc kubenswrapper[4710]: E1009 09:40:57.244971 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb\": container with ID starting with d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb not found: ID does not exist" containerID="d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.245010 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb"} err="failed to get container status \"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb\": rpc error: code = NotFound desc = could not find container \"d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb\": container with ID starting with d67eda1f43217e2a487f7674ba8e0f4a654d9afff37c28d623aec2ae3995dacb not found: ID does not exist" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.245036 4710 scope.go:117] "RemoveContainer" containerID="6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf" Oct 09 09:40:57 crc kubenswrapper[4710]: E1009 09:40:57.245349 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf\": container with ID starting with 6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf not found: ID does not exist" containerID="6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf" Oct 09 09:40:57 crc kubenswrapper[4710]: I1009 09:40:57.245391 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf"} err="failed to get container status \"6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf\": rpc error: code = NotFound desc = could not find container \"6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf\": container with ID starting with 6c5504ad390580c5db5a4a2ab4845dbef512f87cb01d7409b2b9fc9a046c10bf not found: ID does not exist" Oct 09 09:40:58 crc kubenswrapper[4710]: I1009 09:40:58.824367 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" path="/var/lib/kubelet/pods/e5d2570d-bd07-4bfe-b165-f5747b6d7854/volumes" Oct 09 09:41:01 crc kubenswrapper[4710]: I1009 09:41:01.807554 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:41:01 crc kubenswrapper[4710]: I1009 09:41:01.841143 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:41:02 crc kubenswrapper[4710]: I1009 09:41:02.041638 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.212098 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lbqqk" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="registry-server" containerID="cri-o://f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4" gracePeriod=2 Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.642577 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.745082 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities\") pod \"967555f3-0213-4080-b701-e43bfe2e6c0d\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.745209 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content\") pod \"967555f3-0213-4080-b701-e43bfe2e6c0d\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.745372 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5gtn\" (UniqueName: \"kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn\") pod \"967555f3-0213-4080-b701-e43bfe2e6c0d\" (UID: \"967555f3-0213-4080-b701-e43bfe2e6c0d\") " Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.746390 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities" (OuterVolumeSpecName: "utilities") pod "967555f3-0213-4080-b701-e43bfe2e6c0d" (UID: "967555f3-0213-4080-b701-e43bfe2e6c0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.752918 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn" (OuterVolumeSpecName: "kube-api-access-n5gtn") pod "967555f3-0213-4080-b701-e43bfe2e6c0d" (UID: "967555f3-0213-4080-b701-e43bfe2e6c0d"). InnerVolumeSpecName "kube-api-access-n5gtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.824443 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "967555f3-0213-4080-b701-e43bfe2e6c0d" (UID: "967555f3-0213-4080-b701-e43bfe2e6c0d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.848016 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.848043 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/967555f3-0213-4080-b701-e43bfe2e6c0d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:03 crc kubenswrapper[4710]: I1009 09:41:03.848055 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5gtn\" (UniqueName: \"kubernetes.io/projected/967555f3-0213-4080-b701-e43bfe2e6c0d-kube-api-access-n5gtn\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.223573 4710 generic.go:334] "Generic (PLEG): container finished" podID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerID="f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4" exitCode=0 Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.223621 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerDied","Data":"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4"} Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.223660 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lbqqk" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.223665 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lbqqk" event={"ID":"967555f3-0213-4080-b701-e43bfe2e6c0d","Type":"ContainerDied","Data":"ac20441a9480ef045773fd2dcbd5071ae494636ba61468188673d1e3507cb709"} Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.223701 4710 scope.go:117] "RemoveContainer" containerID="f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.243132 4710 scope.go:117] "RemoveContainer" containerID="828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.263037 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.268836 4710 scope.go:117] "RemoveContainer" containerID="90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.271573 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lbqqk"] Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.303063 4710 scope.go:117] "RemoveContainer" containerID="f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4" Oct 09 09:41:04 crc kubenswrapper[4710]: E1009 09:41:04.303561 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4\": container with ID starting with f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4 not found: ID does not exist" containerID="f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.303617 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4"} err="failed to get container status \"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4\": rpc error: code = NotFound desc = could not find container \"f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4\": container with ID starting with f1d08325b9beecedd81b1e29e5718a04a58b06deac5f4ef5c2238903b11955f4 not found: ID does not exist" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.303652 4710 scope.go:117] "RemoveContainer" containerID="828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984" Oct 09 09:41:04 crc kubenswrapper[4710]: E1009 09:41:04.303991 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984\": container with ID starting with 828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984 not found: ID does not exist" containerID="828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.304015 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984"} err="failed to get container status \"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984\": rpc error: code = NotFound desc = could not find container \"828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984\": container with ID starting with 828ab126a2e946ac62e37f366781832df036d4ede2f1d02f339e6bdbd0f36984 not found: ID does not exist" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.304031 4710 scope.go:117] "RemoveContainer" containerID="90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56" Oct 09 09:41:04 crc kubenswrapper[4710]: E1009 09:41:04.304638 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56\": container with ID starting with 90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56 not found: ID does not exist" containerID="90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.304742 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56"} err="failed to get container status \"90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56\": rpc error: code = NotFound desc = could not find container \"90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56\": container with ID starting with 90b9b07b628fbe0da13f0cc60b496ff0987199289d4e3562fff44cdd31212c56 not found: ID does not exist" Oct 09 09:41:04 crc kubenswrapper[4710]: I1009 09:41:04.826412 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" path="/var/lib/kubelet/pods/967555f3-0213-4080-b701-e43bfe2e6c0d/volumes" Oct 09 09:41:05 crc kubenswrapper[4710]: I1009 09:41:05.545879 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:41:05 crc kubenswrapper[4710]: I1009 09:41:05.545949 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:41:13 crc kubenswrapper[4710]: I1009 09:41:13.314739 4710 generic.go:334] "Generic (PLEG): container finished" podID="5a884cb4-f253-4c96-9e29-5e60aff6f144" containerID="63e182480d2bbe7a9cb58bcd6da9af8a8a0316fab2709007a2b6c758732238de" exitCode=0 Oct 09 09:41:13 crc kubenswrapper[4710]: I1009 09:41:13.314779 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" event={"ID":"5a884cb4-f253-4c96-9e29-5e60aff6f144","Type":"ContainerDied","Data":"63e182480d2bbe7a9cb58bcd6da9af8a8a0316fab2709007a2b6c758732238de"} Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.673291 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.753357 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key\") pod \"5a884cb4-f253-4c96-9e29-5e60aff6f144\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.753676 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4brs9\" (UniqueName: \"kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9\") pod \"5a884cb4-f253-4c96-9e29-5e60aff6f144\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.753857 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph\") pod \"5a884cb4-f253-4c96-9e29-5e60aff6f144\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.754013 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory\") pod \"5a884cb4-f253-4c96-9e29-5e60aff6f144\" (UID: \"5a884cb4-f253-4c96-9e29-5e60aff6f144\") " Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.760061 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph" (OuterVolumeSpecName: "ceph") pod "5a884cb4-f253-4c96-9e29-5e60aff6f144" (UID: "5a884cb4-f253-4c96-9e29-5e60aff6f144"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.772661 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9" (OuterVolumeSpecName: "kube-api-access-4brs9") pod "5a884cb4-f253-4c96-9e29-5e60aff6f144" (UID: "5a884cb4-f253-4c96-9e29-5e60aff6f144"). InnerVolumeSpecName "kube-api-access-4brs9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.778656 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5a884cb4-f253-4c96-9e29-5e60aff6f144" (UID: "5a884cb4-f253-4c96-9e29-5e60aff6f144"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.779077 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory" (OuterVolumeSpecName: "inventory") pod "5a884cb4-f253-4c96-9e29-5e60aff6f144" (UID: "5a884cb4-f253-4c96-9e29-5e60aff6f144"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.856893 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.856928 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.856943 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5a884cb4-f253-4c96-9e29-5e60aff6f144-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:14 crc kubenswrapper[4710]: I1009 09:41:14.856956 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4brs9\" (UniqueName: \"kubernetes.io/projected/5a884cb4-f253-4c96-9e29-5e60aff6f144-kube-api-access-4brs9\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.334529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" event={"ID":"5a884cb4-f253-4c96-9e29-5e60aff6f144","Type":"ContainerDied","Data":"d00bd129ee7516619bb7e340128f356b5105c6a697e7072bd496b38adada5150"} Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.335163 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d00bd129ee7516619bb7e340128f356b5105c6a697e7072bd496b38adada5150" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.334596 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.420736 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kwm8"] Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421089 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="extract-utilities" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421107 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="extract-utilities" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421134 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421139 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421155 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="extract-utilities" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421160 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="extract-utilities" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421171 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="extract-content" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421176 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="extract-content" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421184 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a884cb4-f253-4c96-9e29-5e60aff6f144" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421190 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a884cb4-f253-4c96-9e29-5e60aff6f144" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421198 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="extract-content" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421205 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" containerName="extract-content" Oct 09 09:41:15 crc kubenswrapper[4710]: E1009 09:41:15.421214 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421219 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421418 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a884cb4-f253-4c96-9e29-5e60aff6f144" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421453 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d2570d-bd07-4bfe-b165-f5747b6d7854" 
containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.421472 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="967555f3-0213-4080-b701-e43bfe2e6c0d" containerName="registry-server" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.422118 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.424463 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.424904 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.425506 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.426863 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.428541 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.429978 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kwm8"] Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.470915 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.471021 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.471096 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfrxh\" (UniqueName: \"kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.471127 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.572948 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " 
pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.573063 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.573150 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfrxh\" (UniqueName: \"kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.573184 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.585123 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.585903 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.586219 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.588625 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfrxh\" (UniqueName: \"kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh\") pod \"ssh-known-hosts-edpm-deployment-6kwm8\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:15 crc kubenswrapper[4710]: I1009 09:41:15.741119 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:16 crc kubenswrapper[4710]: I1009 09:41:16.217391 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-6kwm8"] Oct 09 09:41:16 crc kubenswrapper[4710]: I1009 09:41:16.341899 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" event={"ID":"9910b475-f4c5-49db-b431-b7214908cf77","Type":"ContainerStarted","Data":"c94d53e254d0337a085a835b8a16cb9efe95de30603fbc263c50d5820cc9523d"} Oct 09 09:41:17 crc kubenswrapper[4710]: I1009 09:41:17.352551 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" event={"ID":"9910b475-f4c5-49db-b431-b7214908cf77","Type":"ContainerStarted","Data":"5d2ffa7a52aafdd93c250514a875e6cbdb96032b9de406848b7c07286672af2a"} Oct 09 09:41:17 crc kubenswrapper[4710]: I1009 09:41:17.397355 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" podStartSLOduration=1.900916655 podStartE2EDuration="2.397330614s" podCreationTimestamp="2025-10-09 09:41:15 +0000 UTC" firstStartedPulling="2025-10-09 09:41:16.226916221 +0000 UTC m=+2199.717024617" lastFinishedPulling="2025-10-09 09:41:16.723330179 +0000 UTC m=+2200.213438576" observedRunningTime="2025-10-09 09:41:17.395256031 +0000 UTC m=+2200.885364428" watchObservedRunningTime="2025-10-09 09:41:17.397330614 +0000 UTC m=+2200.887439001" Oct 09 09:41:24 crc kubenswrapper[4710]: I1009 09:41:24.406879 4710 generic.go:334] "Generic (PLEG): container finished" podID="9910b475-f4c5-49db-b431-b7214908cf77" containerID="5d2ffa7a52aafdd93c250514a875e6cbdb96032b9de406848b7c07286672af2a" exitCode=0 Oct 09 09:41:24 crc kubenswrapper[4710]: I1009 09:41:24.406966 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" event={"ID":"9910b475-f4c5-49db-b431-b7214908cf77","Type":"ContainerDied","Data":"5d2ffa7a52aafdd93c250514a875e6cbdb96032b9de406848b7c07286672af2a"} Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.785367 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.970337 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph\") pod \"9910b475-f4c5-49db-b431-b7214908cf77\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.970474 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfrxh\" (UniqueName: \"kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh\") pod \"9910b475-f4c5-49db-b431-b7214908cf77\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.970678 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam\") pod \"9910b475-f4c5-49db-b431-b7214908cf77\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.970704 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0\") pod \"9910b475-f4c5-49db-b431-b7214908cf77\" (UID: \"9910b475-f4c5-49db-b431-b7214908cf77\") " Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.976006 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh" (OuterVolumeSpecName: "kube-api-access-qfrxh") pod "9910b475-f4c5-49db-b431-b7214908cf77" (UID: "9910b475-f4c5-49db-b431-b7214908cf77"). InnerVolumeSpecName "kube-api-access-qfrxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.976603 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph" (OuterVolumeSpecName: "ceph") pod "9910b475-f4c5-49db-b431-b7214908cf77" (UID: "9910b475-f4c5-49db-b431-b7214908cf77"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.992060 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "9910b475-f4c5-49db-b431-b7214908cf77" (UID: "9910b475-f4c5-49db-b431-b7214908cf77"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:25 crc kubenswrapper[4710]: I1009 09:41:25.993773 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "9910b475-f4c5-49db-b431-b7214908cf77" (UID: "9910b475-f4c5-49db-b431-b7214908cf77"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.073501 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.073533 4710 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.073544 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/9910b475-f4c5-49db-b431-b7214908cf77-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.073556 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfrxh\" (UniqueName: \"kubernetes.io/projected/9910b475-f4c5-49db-b431-b7214908cf77-kube-api-access-qfrxh\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.422419 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" event={"ID":"9910b475-f4c5-49db-b431-b7214908cf77","Type":"ContainerDied","Data":"c94d53e254d0337a085a835b8a16cb9efe95de30603fbc263c50d5820cc9523d"} Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.422475 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c94d53e254d0337a085a835b8a16cb9efe95de30603fbc263c50d5820cc9523d" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.422502 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-6kwm8" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.507570 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks"] Oct 09 09:41:26 crc kubenswrapper[4710]: E1009 09:41:26.508360 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9910b475-f4c5-49db-b431-b7214908cf77" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.508380 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9910b475-f4c5-49db-b431-b7214908cf77" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.508641 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9910b475-f4c5-49db-b431-b7214908cf77" containerName="ssh-known-hosts-edpm-deployment" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.509414 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.511504 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.513480 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.513629 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.513643 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.513683 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.519131 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks"] Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.588612 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.588706 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.588751 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7gm5\" (UniqueName: \"kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.588835 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.689708 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.689950 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7gm5\" (UniqueName: 
\"kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.690080 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.690184 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.695565 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.695850 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.696055 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.704099 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7gm5\" (UniqueName: \"kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-296ks\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:26 crc kubenswrapper[4710]: I1009 09:41:26.826149 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:27 crc kubenswrapper[4710]: I1009 09:41:27.301465 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks"] Oct 09 09:41:27 crc kubenswrapper[4710]: I1009 09:41:27.432557 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" event={"ID":"3015dc52-bcf5-444b-9200-82a3f79b0fcb","Type":"ContainerStarted","Data":"9596255ed6cd69af9edfa8fded43c5596ebfbdee4bda66fe66c25d6d32f37b09"} Oct 09 09:41:28 crc kubenswrapper[4710]: I1009 09:41:28.441296 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" event={"ID":"3015dc52-bcf5-444b-9200-82a3f79b0fcb","Type":"ContainerStarted","Data":"f27d2c6d0ebe2bde15f35ebdeee0fdc94527cf651d62e522c079e71be5cb08ab"} Oct 09 09:41:34 crc kubenswrapper[4710]: I1009 09:41:34.484092 4710 generic.go:334] "Generic (PLEG): container finished" podID="3015dc52-bcf5-444b-9200-82a3f79b0fcb" containerID="f27d2c6d0ebe2bde15f35ebdeee0fdc94527cf651d62e522c079e71be5cb08ab" exitCode=0 Oct 09 09:41:34 crc kubenswrapper[4710]: I1009 09:41:34.484170 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" event={"ID":"3015dc52-bcf5-444b-9200-82a3f79b0fcb","Type":"ContainerDied","Data":"f27d2c6d0ebe2bde15f35ebdeee0fdc94527cf651d62e522c079e71be5cb08ab"} Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.545534 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.545599 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.545648 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.546298 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.546358 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" gracePeriod=600 Oct 09 09:41:35 crc kubenswrapper[4710]: E1009 09:41:35.674707 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:41:35 crc kubenswrapper[4710]: I1009 09:41:35.880958 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.036445 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory\") pod \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.036635 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key\") pod \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.036754 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph\") pod \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.036829 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7gm5\" (UniqueName: \"kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5\") pod \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\" (UID: \"3015dc52-bcf5-444b-9200-82a3f79b0fcb\") " Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.047408 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5" (OuterVolumeSpecName: "kube-api-access-d7gm5") pod "3015dc52-bcf5-444b-9200-82a3f79b0fcb" (UID: "3015dc52-bcf5-444b-9200-82a3f79b0fcb"). InnerVolumeSpecName "kube-api-access-d7gm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.048692 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph" (OuterVolumeSpecName: "ceph") pod "3015dc52-bcf5-444b-9200-82a3f79b0fcb" (UID: "3015dc52-bcf5-444b-9200-82a3f79b0fcb"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.057187 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory" (OuterVolumeSpecName: "inventory") pod "3015dc52-bcf5-444b-9200-82a3f79b0fcb" (UID: "3015dc52-bcf5-444b-9200-82a3f79b0fcb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.057794 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3015dc52-bcf5-444b-9200-82a3f79b0fcb" (UID: "3015dc52-bcf5-444b-9200-82a3f79b0fcb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.138334 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.138366 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7gm5\" (UniqueName: \"kubernetes.io/projected/3015dc52-bcf5-444b-9200-82a3f79b0fcb-kube-api-access-d7gm5\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.138377 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.138387 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3015dc52-bcf5-444b-9200-82a3f79b0fcb-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.513596 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" exitCode=0 Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.513628 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2"} Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.513913 4710 scope.go:117] "RemoveContainer" containerID="6de4f77feff45a70d7347d44205a91ed5c6ec990b689388cd2cec08bb7b9671b" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.514552 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:41:36 crc kubenswrapper[4710]: E1009 09:41:36.514968 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.518630 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" event={"ID":"3015dc52-bcf5-444b-9200-82a3f79b0fcb","Type":"ContainerDied","Data":"9596255ed6cd69af9edfa8fded43c5596ebfbdee4bda66fe66c25d6d32f37b09"} Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.518739 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9596255ed6cd69af9edfa8fded43c5596ebfbdee4bda66fe66c25d6d32f37b09" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.518850 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-296ks" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.589586 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs"] Oct 09 09:41:36 crc kubenswrapper[4710]: E1009 09:41:36.589962 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3015dc52-bcf5-444b-9200-82a3f79b0fcb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.589976 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3015dc52-bcf5-444b-9200-82a3f79b0fcb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.590140 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3015dc52-bcf5-444b-9200-82a3f79b0fcb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.590830 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.593859 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.609163 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.609506 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.609698 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.609907 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.619923 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs"] Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.650836 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.650898 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.650970 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzh9t\" (UniqueName: \"kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " 
pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.651010 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.752367 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.752690 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzh9t\" (UniqueName: \"kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.752839 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.753141 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.756964 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.757071 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.757354 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.767504 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kzh9t\" (UniqueName: \"kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.920716 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:41:36 crc kubenswrapper[4710]: I1009 09:41:36.929649 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:37 crc kubenswrapper[4710]: I1009 09:41:37.374859 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs"] Oct 09 09:41:37 crc kubenswrapper[4710]: I1009 09:41:37.528183 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" event={"ID":"cdbaf739-8dd9-457f-97a5-8ddbcff386ea","Type":"ContainerStarted","Data":"e1e9f76e5f3d35b35bb1098ea5a04bfda96f4be8afa46d077ab4947037399ec6"} Oct 09 09:41:37 crc kubenswrapper[4710]: I1009 09:41:37.873483 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:41:38 crc kubenswrapper[4710]: I1009 09:41:38.537533 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" event={"ID":"cdbaf739-8dd9-457f-97a5-8ddbcff386ea","Type":"ContainerStarted","Data":"8c0d758affc7b9fcce497ad62812fe14eb7ee6df6258e8575d1e6fce944adb81"} Oct 09 09:41:38 crc kubenswrapper[4710]: I1009 09:41:38.552308 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" podStartSLOduration=2.058535689 podStartE2EDuration="2.552287958s" podCreationTimestamp="2025-10-09 09:41:36 +0000 UTC" firstStartedPulling="2025-10-09 09:41:37.377981644 +0000 UTC m=+2220.868090042" lastFinishedPulling="2025-10-09 09:41:37.871733913 +0000 UTC m=+2221.361842311" observedRunningTime="2025-10-09 09:41:38.549055111 +0000 UTC m=+2222.039163508" watchObservedRunningTime="2025-10-09 09:41:38.552287958 +0000 UTC m=+2222.042396355" Oct 09 09:41:46 crc kubenswrapper[4710]: I1009 09:41:46.594829 4710 generic.go:334] "Generic (PLEG): container finished" podID="cdbaf739-8dd9-457f-97a5-8ddbcff386ea" containerID="8c0d758affc7b9fcce497ad62812fe14eb7ee6df6258e8575d1e6fce944adb81" exitCode=0 Oct 09 09:41:46 crc kubenswrapper[4710]: I1009 09:41:46.594923 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" event={"ID":"cdbaf739-8dd9-457f-97a5-8ddbcff386ea","Type":"ContainerDied","Data":"8c0d758affc7b9fcce497ad62812fe14eb7ee6df6258e8575d1e6fce944adb81"} Oct 09 09:41:47 crc kubenswrapper[4710]: I1009 09:41:47.909123 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.047484 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory\") pod \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.047730 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key\") pod \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.047759 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzh9t\" (UniqueName: \"kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t\") pod \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.047803 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph\") pod \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\" (UID: \"cdbaf739-8dd9-457f-97a5-8ddbcff386ea\") " Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.063621 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t" (OuterVolumeSpecName: "kube-api-access-kzh9t") pod "cdbaf739-8dd9-457f-97a5-8ddbcff386ea" (UID: "cdbaf739-8dd9-457f-97a5-8ddbcff386ea"). InnerVolumeSpecName "kube-api-access-kzh9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.068674 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "cdbaf739-8dd9-457f-97a5-8ddbcff386ea" (UID: "cdbaf739-8dd9-457f-97a5-8ddbcff386ea"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.070161 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph" (OuterVolumeSpecName: "ceph") pod "cdbaf739-8dd9-457f-97a5-8ddbcff386ea" (UID: "cdbaf739-8dd9-457f-97a5-8ddbcff386ea"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.091274 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory" (OuterVolumeSpecName: "inventory") pod "cdbaf739-8dd9-457f-97a5-8ddbcff386ea" (UID: "cdbaf739-8dd9-457f-97a5-8ddbcff386ea"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.150500 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.150614 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.150672 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.150744 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzh9t\" (UniqueName: \"kubernetes.io/projected/cdbaf739-8dd9-457f-97a5-8ddbcff386ea-kube-api-access-kzh9t\") on node \"crc\" DevicePath \"\"" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.612092 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" event={"ID":"cdbaf739-8dd9-457f-97a5-8ddbcff386ea","Type":"ContainerDied","Data":"e1e9f76e5f3d35b35bb1098ea5a04bfda96f4be8afa46d077ab4947037399ec6"} Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.612140 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1e9f76e5f3d35b35bb1098ea5a04bfda96f4be8afa46d077ab4947037399ec6" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.612174 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.683695 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh"] Oct 09 09:41:48 crc kubenswrapper[4710]: E1009 09:41:48.684079 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdbaf739-8dd9-457f-97a5-8ddbcff386ea" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.684095 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdbaf739-8dd9-457f-97a5-8ddbcff386ea" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.684317 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdbaf739-8dd9-457f-97a5-8ddbcff386ea" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.686251 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.697854 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.699158 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.699241 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.705310 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh"] Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.706894 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.706929 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.707078 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.707115 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.707239 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760351 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760410 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760446 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760488 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760505 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt8p6\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760520 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760539 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760557 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760578 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760606 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760633 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760649 4710 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.760681 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862287 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862380 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862408 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862491 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862511 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt8p6\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862529 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862566 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862596 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862617 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862653 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862687 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862710 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.862769 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.865608 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.865942 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.866331 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.866519 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.866849 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.867643 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.868987 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.869386 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc 
kubenswrapper[4710]: I1009 09:41:48.870894 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.871465 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.871873 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.874253 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.879079 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt8p6\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:48 crc kubenswrapper[4710]: I1009 09:41:48.999603 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:41:49 crc kubenswrapper[4710]: I1009 09:41:49.460329 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh"] Oct 09 09:41:49 crc kubenswrapper[4710]: I1009 09:41:49.621108 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" event={"ID":"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf","Type":"ContainerStarted","Data":"384b1b633e9b6316430944f1a9122a610f1aa281e71d1b51edf8c3e71c447828"} Oct 09 09:41:50 crc kubenswrapper[4710]: I1009 09:41:50.633209 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" event={"ID":"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf","Type":"ContainerStarted","Data":"3a1479e2bebfc96f62a33c287fb169971702f0f91537107c902d71d47959bfe9"} Oct 09 09:41:50 crc kubenswrapper[4710]: I1009 09:41:50.649801 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" podStartSLOduration=2.097164297 podStartE2EDuration="2.649783662s" podCreationTimestamp="2025-10-09 09:41:48 +0000 UTC" firstStartedPulling="2025-10-09 09:41:49.46038621 +0000 UTC m=+2232.950494606" lastFinishedPulling="2025-10-09 09:41:50.013005574 +0000 UTC m=+2233.503113971" observedRunningTime="2025-10-09 09:41:50.649135099 +0000 UTC m=+2234.139243496" watchObservedRunningTime="2025-10-09 09:41:50.649783662 +0000 UTC m=+2234.139892059" Oct 09 09:41:50 crc kubenswrapper[4710]: I1009 09:41:50.814573 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:41:50 crc kubenswrapper[4710]: E1009 09:41:50.814806 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:42:04 crc kubenswrapper[4710]: I1009 09:42:04.815692 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:42:04 crc kubenswrapper[4710]: E1009 09:42:04.816772 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:42:13 crc kubenswrapper[4710]: I1009 09:42:13.809306 4710 generic.go:334] "Generic (PLEG): container finished" podID="77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" containerID="3a1479e2bebfc96f62a33c287fb169971702f0f91537107c902d71d47959bfe9" exitCode=0 Oct 09 09:42:13 crc kubenswrapper[4710]: I1009 09:42:13.809397 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" 
event={"ID":"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf","Type":"ContainerDied","Data":"3a1479e2bebfc96f62a33c287fb169971702f0f91537107c902d71d47959bfe9"} Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.154997 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246375 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246596 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246622 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246694 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246723 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246743 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dt8p6\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246775 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246795 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246830 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246880 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246906 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.246943 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.247083 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle\") pod \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\" (UID: \"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf\") " Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.253974 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.254373 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.254480 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.254646 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.254710 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.258538 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.258613 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.259079 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph" (OuterVolumeSpecName: "ceph") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.262597 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.273172 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.274762 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.278815 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory" (OuterVolumeSpecName: "inventory") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.290672 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6" (OuterVolumeSpecName: "kube-api-access-dt8p6") pod "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" (UID: "77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf"). InnerVolumeSpecName "kube-api-access-dt8p6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367388 4710 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367417 4710 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367457 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367469 4710 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367481 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367492 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367500 4710 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367510 4710 reconciler_common.go:293] "Volume 
detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367520 4710 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367529 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dt8p6\" (UniqueName: \"kubernetes.io/projected/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-kube-api-access-dt8p6\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367536 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367543 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.367552 4710 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.828019 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" event={"ID":"77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf","Type":"ContainerDied","Data":"384b1b633e9b6316430944f1a9122a610f1aa281e71d1b51edf8c3e71c447828"} Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.828336 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="384b1b633e9b6316430944f1a9122a610f1aa281e71d1b51edf8c3e71c447828" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.828127 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.908473 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2"] Oct 09 09:42:15 crc kubenswrapper[4710]: E1009 09:42:15.908940 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.908965 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.909257 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.910014 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.914615 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.914615 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.914653 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.915165 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.918340 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.933542 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2"] Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.978252 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qrbb\" (UniqueName: \"kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.978335 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.978512 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:15 crc kubenswrapper[4710]: I1009 09:42:15.978621 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.079973 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.080073 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.080273 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.080367 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qrbb\" (UniqueName: \"kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.083960 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.089005 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.094085 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.095405 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qrbb\" (UniqueName: \"kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb\") pod \"ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.225571 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.685283 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2"] Oct 09 09:42:16 crc kubenswrapper[4710]: I1009 09:42:16.838168 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" event={"ID":"18a1b835-afd2-4ceb-ad50-156b24a80601","Type":"ContainerStarted","Data":"8973becb7ac67546bfa318e0bc53c84eae993e5ebf159d9040bbcaa797113e05"} Oct 09 09:42:17 crc kubenswrapper[4710]: I1009 09:42:17.847636 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" event={"ID":"18a1b835-afd2-4ceb-ad50-156b24a80601","Type":"ContainerStarted","Data":"9f7bc86d15473efacd76a0465a31de15733de05cfb1d82ad2ea58e896cc3ae39"} Oct 09 09:42:17 crc kubenswrapper[4710]: I1009 09:42:17.871585 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" podStartSLOduration=2.20639284 podStartE2EDuration="2.87156796s" podCreationTimestamp="2025-10-09 09:42:15 +0000 UTC" firstStartedPulling="2025-10-09 09:42:16.692330969 +0000 UTC m=+2260.182439367" lastFinishedPulling="2025-10-09 09:42:17.35750609 +0000 UTC m=+2260.847614487" observedRunningTime="2025-10-09 09:42:17.862263573 +0000 UTC m=+2261.352371971" watchObservedRunningTime="2025-10-09 09:42:17.87156796 +0000 UTC m=+2261.361676357" Oct 09 09:42:18 crc kubenswrapper[4710]: I1009 09:42:18.815544 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:42:18 crc kubenswrapper[4710]: E1009 09:42:18.816234 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:42:21 crc kubenswrapper[4710]: I1009 09:42:21.879660 4710 generic.go:334] "Generic (PLEG): container finished" podID="18a1b835-afd2-4ceb-ad50-156b24a80601" containerID="9f7bc86d15473efacd76a0465a31de15733de05cfb1d82ad2ea58e896cc3ae39" exitCode=0 Oct 09 09:42:21 crc kubenswrapper[4710]: I1009 09:42:21.879748 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" event={"ID":"18a1b835-afd2-4ceb-ad50-156b24a80601","Type":"ContainerDied","Data":"9f7bc86d15473efacd76a0465a31de15733de05cfb1d82ad2ea58e896cc3ae39"} Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.226621 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.304367 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key\") pod \"18a1b835-afd2-4ceb-ad50-156b24a80601\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.304488 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qrbb\" (UniqueName: \"kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb\") pod \"18a1b835-afd2-4ceb-ad50-156b24a80601\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.304536 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory\") pod \"18a1b835-afd2-4ceb-ad50-156b24a80601\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.304730 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph\") pod \"18a1b835-afd2-4ceb-ad50-156b24a80601\" (UID: \"18a1b835-afd2-4ceb-ad50-156b24a80601\") " Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.309724 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph" (OuterVolumeSpecName: "ceph") pod "18a1b835-afd2-4ceb-ad50-156b24a80601" (UID: "18a1b835-afd2-4ceb-ad50-156b24a80601"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.314473 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb" (OuterVolumeSpecName: "kube-api-access-9qrbb") pod "18a1b835-afd2-4ceb-ad50-156b24a80601" (UID: "18a1b835-afd2-4ceb-ad50-156b24a80601"). InnerVolumeSpecName "kube-api-access-9qrbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.326800 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18a1b835-afd2-4ceb-ad50-156b24a80601" (UID: "18a1b835-afd2-4ceb-ad50-156b24a80601"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.328758 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory" (OuterVolumeSpecName: "inventory") pod "18a1b835-afd2-4ceb-ad50-156b24a80601" (UID: "18a1b835-afd2-4ceb-ad50-156b24a80601"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.406540 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.406569 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.406578 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18a1b835-afd2-4ceb-ad50-156b24a80601-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.406587 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qrbb\" (UniqueName: \"kubernetes.io/projected/18a1b835-afd2-4ceb-ad50-156b24a80601-kube-api-access-9qrbb\") on node \"crc\" DevicePath \"\"" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.896838 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" event={"ID":"18a1b835-afd2-4ceb-ad50-156b24a80601","Type":"ContainerDied","Data":"8973becb7ac67546bfa318e0bc53c84eae993e5ebf159d9040bbcaa797113e05"} Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.896877 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8973becb7ac67546bfa318e0bc53c84eae993e5ebf159d9040bbcaa797113e05" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.896898 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.970921 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm"] Oct 09 09:42:23 crc kubenswrapper[4710]: E1009 09:42:23.971666 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a1b835-afd2-4ceb-ad50-156b24a80601" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.971783 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a1b835-afd2-4ceb-ad50-156b24a80601" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.972092 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="18a1b835-afd2-4ceb-ad50-156b24a80601" containerName="ceph-client-edpm-deployment-openstack-edpm-ipam" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.972873 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.975048 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.976382 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.977307 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.977751 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.978051 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.981162 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:42:23 crc kubenswrapper[4710]: I1009 09:42:23.985411 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm"] Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.016950 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.017267 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.017387 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.017865 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.018000 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 
09:42:24.018058 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5295\" (UniqueName: \"kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119771 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119840 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119876 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5295\" (UniqueName: \"kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119916 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119945 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.119963 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.120838 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.124912 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.125321 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.126119 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.126377 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.133662 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5295\" (UniqueName: \"kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-fkddm\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.289423 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.755943 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm"] Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.758504 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:42:24 crc kubenswrapper[4710]: I1009 09:42:24.907257 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" event={"ID":"30e0d33b-9f4a-4209-ad66-d5f51af8deea","Type":"ContainerStarted","Data":"b7d6b377e4272ad2b5011f55d24fbe742db60d84a21298cff57e2f58239ee445"} Oct 09 09:42:25 crc kubenswrapper[4710]: I1009 09:42:25.919327 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" event={"ID":"30e0d33b-9f4a-4209-ad66-d5f51af8deea","Type":"ContainerStarted","Data":"ed13ce14e3fae49934a6584894a11d515e088f4ffd3466eff91e14d78eecc044"} Oct 09 09:42:25 crc kubenswrapper[4710]: I1009 09:42:25.938599 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" podStartSLOduration=2.440795246 podStartE2EDuration="2.938580139s" podCreationTimestamp="2025-10-09 09:42:23 +0000 UTC" firstStartedPulling="2025-10-09 09:42:24.75788589 +0000 UTC m=+2268.247994297" lastFinishedPulling="2025-10-09 09:42:25.255670794 +0000 UTC m=+2268.745779190" observedRunningTime="2025-10-09 09:42:25.934134244 +0000 UTC m=+2269.424242642" watchObservedRunningTime="2025-10-09 09:42:25.938580139 +0000 UTC m=+2269.428688536" Oct 09 09:42:32 crc kubenswrapper[4710]: I1009 09:42:32.815055 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:42:32 crc kubenswrapper[4710]: E1009 09:42:32.815621 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:42:43 crc kubenswrapper[4710]: I1009 09:42:43.815580 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:42:43 crc kubenswrapper[4710]: E1009 09:42:43.816413 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:42:58 crc kubenswrapper[4710]: I1009 09:42:58.815402 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:42:58 crc kubenswrapper[4710]: E1009 09:42:58.816256 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:43:12 crc kubenswrapper[4710]: I1009 09:43:12.815359 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:43:12 crc kubenswrapper[4710]: E1009 09:43:12.816111 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:43:20 crc kubenswrapper[4710]: I1009 09:43:20.354287 4710 generic.go:334] "Generic (PLEG): container finished" podID="30e0d33b-9f4a-4209-ad66-d5f51af8deea" containerID="ed13ce14e3fae49934a6584894a11d515e088f4ffd3466eff91e14d78eecc044" exitCode=0 Oct 09 09:43:20 crc kubenswrapper[4710]: I1009 09:43:20.354372 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" event={"ID":"30e0d33b-9f4a-4209-ad66-d5f51af8deea","Type":"ContainerDied","Data":"ed13ce14e3fae49934a6584894a11d515e088f4ffd3466eff91e14d78eecc044"} Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.751557 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.871863 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.871914 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5295\" (UniqueName: \"kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.871962 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.872473 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.872873 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.873011 4710 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0\") pod \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\" (UID: \"30e0d33b-9f4a-4209-ad66-d5f51af8deea\") " Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.879077 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.879687 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph" (OuterVolumeSpecName: "ceph") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.879891 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295" (OuterVolumeSpecName: "kube-api-access-k5295") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "kube-api-access-k5295". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.896382 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.900402 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory" (OuterVolumeSpecName: "inventory") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.900814 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "30e0d33b-9f4a-4209-ad66-d5f51af8deea" (UID: "30e0d33b-9f4a-4209-ad66-d5f51af8deea"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977075 4710 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977317 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977332 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5295\" (UniqueName: \"kubernetes.io/projected/30e0d33b-9f4a-4209-ad66-d5f51af8deea-kube-api-access-k5295\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977345 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977354 4710 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:21 crc kubenswrapper[4710]: I1009 09:43:21.977362 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/30e0d33b-9f4a-4209-ad66-d5f51af8deea-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.371747 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" event={"ID":"30e0d33b-9f4a-4209-ad66-d5f51af8deea","Type":"ContainerDied","Data":"b7d6b377e4272ad2b5011f55d24fbe742db60d84a21298cff57e2f58239ee445"} Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.371799 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7d6b377e4272ad2b5011f55d24fbe742db60d84a21298cff57e2f58239ee445" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.371808 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-fkddm" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.454815 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl"] Oct 09 09:43:22 crc kubenswrapper[4710]: E1009 09:43:22.455517 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30e0d33b-9f4a-4209-ad66-d5f51af8deea" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.455540 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="30e0d33b-9f4a-4209-ad66-d5f51af8deea" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.455871 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="30e0d33b-9f4a-4209-ad66-d5f51af8deea" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.456682 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.463920 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.463923 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.464319 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.464392 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.464862 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.465055 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.464683 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.472076 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl"] Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.494862 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.494919 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.494972 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.495024 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.495044 4710 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.495141 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.495177 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f44zr\" (UniqueName: \"kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596604 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596645 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f44zr\" (UniqueName: \"kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596680 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596715 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596747 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596784 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.596799 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.601692 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.601693 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.601840 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.601994 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.602476 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.605162 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.616185 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f44zr\" (UniqueName: \"kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:22 crc kubenswrapper[4710]: I1009 09:43:22.772973 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:43:23 crc kubenswrapper[4710]: I1009 09:43:23.230706 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl"] Oct 09 09:43:23 crc kubenswrapper[4710]: I1009 09:43:23.386048 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" event={"ID":"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77","Type":"ContainerStarted","Data":"8eaa1c03ce3f7ce65bb222fed3cee340f036d5f9884e9cb76957d54098273aeb"} Oct 09 09:43:23 crc kubenswrapper[4710]: I1009 09:43:23.815315 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:43:23 crc kubenswrapper[4710]: E1009 09:43:23.815519 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:43:24 crc kubenswrapper[4710]: I1009 09:43:24.396694 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" event={"ID":"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77","Type":"ContainerStarted","Data":"befcf8e9f113721ea35ef68078364e5540dc1122db6e366e7008322d01379b2b"} Oct 09 09:43:24 crc kubenswrapper[4710]: I1009 09:43:24.413195 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" podStartSLOduration=1.833513725 podStartE2EDuration="2.413161963s" podCreationTimestamp="2025-10-09 09:43:22 +0000 UTC" firstStartedPulling="2025-10-09 09:43:23.228279765 +0000 UTC m=+2326.718388162" lastFinishedPulling="2025-10-09 09:43:23.807928003 +0000 UTC m=+2327.298036400" observedRunningTime="2025-10-09 09:43:24.408852646 +0000 UTC m=+2327.898961042" watchObservedRunningTime="2025-10-09 09:43:24.413161963 +0000 UTC m=+2327.903270360" Oct 09 09:43:38 crc kubenswrapper[4710]: I1009 09:43:38.816114 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:43:38 crc kubenswrapper[4710]: E1009 09:43:38.817159 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:43:50 crc kubenswrapper[4710]: I1009 09:43:50.815795 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:43:50 crc kubenswrapper[4710]: E1009 09:43:50.816833 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:44:03 crc kubenswrapper[4710]: I1009 09:44:03.815708 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:44:03 crc kubenswrapper[4710]: E1009 09:44:03.816717 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:44:09 crc kubenswrapper[4710]: I1009 09:44:09.790269 4710 generic.go:334] "Generic (PLEG): container finished" podID="7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" containerID="befcf8e9f113721ea35ef68078364e5540dc1122db6e366e7008322d01379b2b" exitCode=0 Oct 09 09:44:09 crc kubenswrapper[4710]: I1009 09:44:09.790391 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" event={"ID":"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77","Type":"ContainerDied","Data":"befcf8e9f113721ea35ef68078364e5540dc1122db6e366e7008322d01379b2b"} Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.214807 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.287564 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.287684 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.287784 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.287867 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.287913 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.289137 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f44zr\" (UniqueName: \"kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.289231 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle\") pod \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\" (UID: \"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77\") " Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.295020 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.297317 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph" (OuterVolumeSpecName: "ceph") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.299942 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr" (OuterVolumeSpecName: "kube-api-access-f44zr") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "kube-api-access-f44zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.314976 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.315754 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory" (OuterVolumeSpecName: "inventory") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.316033 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.319001 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" (UID: "7688fa72-f35a-4dd1-a1de-1eda8bf5ff77"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393493 4710 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393601 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393660 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f44zr\" (UniqueName: \"kubernetes.io/projected/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-kube-api-access-f44zr\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393718 4710 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393769 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393833 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.393890 4710 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7688fa72-f35a-4dd1-a1de-1eda8bf5ff77-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.811518 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" event={"ID":"7688fa72-f35a-4dd1-a1de-1eda8bf5ff77","Type":"ContainerDied","Data":"8eaa1c03ce3f7ce65bb222fed3cee340f036d5f9884e9cb76957d54098273aeb"} Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.811854 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eaa1c03ce3f7ce65bb222fed3cee340f036d5f9884e9cb76957d54098273aeb" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.811613 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.904699 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq"] Oct 09 09:44:11 crc kubenswrapper[4710]: E1009 09:44:11.912314 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.912359 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.913224 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="7688fa72-f35a-4dd1-a1de-1eda8bf5ff77" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.914284 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.925734 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.926099 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.926291 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.926475 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.926532 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.926480 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:44:11 crc kubenswrapper[4710]: I1009 09:44:11.956561 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq"] Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015654 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015698 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015803 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015865 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5th2\" (UniqueName: \"kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015891 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.015944 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.117815 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.117901 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.117929 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.117987 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.118023 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5th2\" (UniqueName: 
\"kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.118042 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.125097 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.125637 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.126184 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.126607 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.127585 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.135168 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5th2\" (UniqueName: \"kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.259013 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.755469 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq"] Oct 09 09:44:12 crc kubenswrapper[4710]: I1009 09:44:12.823946 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" event={"ID":"3197a42e-d565-4f24-9115-990a46dfc659","Type":"ContainerStarted","Data":"f3ea780b4ca87ed37d315c4d0b3a83344ebafcf8d75506daef9f989d134f39e3"} Oct 09 09:44:13 crc kubenswrapper[4710]: I1009 09:44:13.834109 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" event={"ID":"3197a42e-d565-4f24-9115-990a46dfc659","Type":"ContainerStarted","Data":"26ff87a479813e6c2c638b3d88200dff5a22cef2b25e5c85bb7039ae17344248"} Oct 09 09:44:14 crc kubenswrapper[4710]: I1009 09:44:14.816149 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:44:14 crc kubenswrapper[4710]: E1009 09:44:14.816454 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:44:29 crc kubenswrapper[4710]: I1009 09:44:29.815542 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:44:29 crc kubenswrapper[4710]: E1009 09:44:29.816624 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:44:44 crc kubenswrapper[4710]: I1009 09:44:44.815132 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:44:44 crc kubenswrapper[4710]: E1009 09:44:44.816762 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:44:56 crc kubenswrapper[4710]: I1009 09:44:56.820128 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:44:56 crc kubenswrapper[4710]: E1009 09:44:56.821415 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.134122 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" podStartSLOduration=48.551210589 podStartE2EDuration="49.134094538s" podCreationTimestamp="2025-10-09 09:44:11 +0000 UTC" firstStartedPulling="2025-10-09 09:44:12.761204818 +0000 UTC m=+2376.251313216" lastFinishedPulling="2025-10-09 09:44:13.344088768 +0000 UTC m=+2376.834197165" observedRunningTime="2025-10-09 09:44:13.854521294 +0000 UTC m=+2377.344629691" watchObservedRunningTime="2025-10-09 09:45:00.134094538 +0000 UTC m=+2423.624202935" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.139401 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4"] Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.141016 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.142914 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.147344 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4"] Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.147637 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.268770 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shkcn\" (UniqueName: \"kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.268988 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.269252 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.370518 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: 
I1009 09:45:00.370647 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shkcn\" (UniqueName: \"kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.370741 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.372212 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.376389 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.386535 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shkcn\" (UniqueName: \"kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn\") pod \"collect-profiles-29333385-cl6h4\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.461149 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:00 crc kubenswrapper[4710]: I1009 09:45:00.883446 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4"] Oct 09 09:45:01 crc kubenswrapper[4710]: I1009 09:45:01.266092 4710 generic.go:334] "Generic (PLEG): container finished" podID="3d79e7f1-2acc-4df9-be70-8775773f4f54" containerID="523a489f4397f1e09d14b64f11c4959b44d1a9560f222487619d7e2821444b37" exitCode=0 Oct 09 09:45:01 crc kubenswrapper[4710]: I1009 09:45:01.266135 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" event={"ID":"3d79e7f1-2acc-4df9-be70-8775773f4f54","Type":"ContainerDied","Data":"523a489f4397f1e09d14b64f11c4959b44d1a9560f222487619d7e2821444b37"} Oct 09 09:45:01 crc kubenswrapper[4710]: I1009 09:45:01.266192 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" event={"ID":"3d79e7f1-2acc-4df9-be70-8775773f4f54","Type":"ContainerStarted","Data":"f38dc2c4120b10bfe55fb42ed9518383ebe7c1315a43901e28a5f52e23a118cf"} Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.593876 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.713132 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume\") pod \"3d79e7f1-2acc-4df9-be70-8775773f4f54\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.713236 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume\") pod \"3d79e7f1-2acc-4df9-be70-8775773f4f54\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.713365 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shkcn\" (UniqueName: \"kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn\") pod \"3d79e7f1-2acc-4df9-be70-8775773f4f54\" (UID: \"3d79e7f1-2acc-4df9-be70-8775773f4f54\") " Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.713850 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume" (OuterVolumeSpecName: "config-volume") pod "3d79e7f1-2acc-4df9-be70-8775773f4f54" (UID: "3d79e7f1-2acc-4df9-be70-8775773f4f54"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.714573 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d79e7f1-2acc-4df9-be70-8775773f4f54-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.720771 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3d79e7f1-2acc-4df9-be70-8775773f4f54" (UID: "3d79e7f1-2acc-4df9-be70-8775773f4f54"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.721891 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn" (OuterVolumeSpecName: "kube-api-access-shkcn") pod "3d79e7f1-2acc-4df9-be70-8775773f4f54" (UID: "3d79e7f1-2acc-4df9-be70-8775773f4f54"). InnerVolumeSpecName "kube-api-access-shkcn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.815806 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d79e7f1-2acc-4df9-be70-8775773f4f54-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 09:45:02 crc kubenswrapper[4710]: I1009 09:45:02.815831 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shkcn\" (UniqueName: \"kubernetes.io/projected/3d79e7f1-2acc-4df9-be70-8775773f4f54-kube-api-access-shkcn\") on node \"crc\" DevicePath \"\"" Oct 09 09:45:03 crc kubenswrapper[4710]: I1009 09:45:03.283465 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" event={"ID":"3d79e7f1-2acc-4df9-be70-8775773f4f54","Type":"ContainerDied","Data":"f38dc2c4120b10bfe55fb42ed9518383ebe7c1315a43901e28a5f52e23a118cf"} Oct 09 09:45:03 crc kubenswrapper[4710]: I1009 09:45:03.283517 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f38dc2c4120b10bfe55fb42ed9518383ebe7c1315a43901e28a5f52e23a118cf" Oct 09 09:45:03 crc kubenswrapper[4710]: I1009 09:45:03.283535 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333385-cl6h4" Oct 09 09:45:03 crc kubenswrapper[4710]: I1009 09:45:03.659527 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx"] Oct 09 09:45:03 crc kubenswrapper[4710]: I1009 09:45:03.665041 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333340-nq2tx"] Oct 09 09:45:04 crc kubenswrapper[4710]: I1009 09:45:04.826424 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f367bf0-fb0c-4884-b874-d4a426a8ff5a" path="/var/lib/kubelet/pods/2f367bf0-fb0c-4884-b874-d4a426a8ff5a/volumes" Oct 09 09:45:09 crc kubenswrapper[4710]: I1009 09:45:09.815135 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:45:09 crc kubenswrapper[4710]: E1009 09:45:09.816016 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:45:22 crc kubenswrapper[4710]: I1009 09:45:22.815072 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:45:22 crc kubenswrapper[4710]: E1009 09:45:22.817180 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:45:35 crc kubenswrapper[4710]: I1009 09:45:35.816122 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 
09:45:35 crc kubenswrapper[4710]: E1009 09:45:35.817094 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:45:48 crc kubenswrapper[4710]: I1009 09:45:48.866612 4710 scope.go:117] "RemoveContainer" containerID="57693cd1cf50852dac8ec9a559e30eb932fbf87ee2f6b483640c94699816c4b5" Oct 09 09:45:49 crc kubenswrapper[4710]: I1009 09:45:49.815177 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:45:49 crc kubenswrapper[4710]: E1009 09:45:49.815769 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:46:02 crc kubenswrapper[4710]: I1009 09:46:02.816069 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:46:02 crc kubenswrapper[4710]: E1009 09:46:02.817336 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:46:13 crc kubenswrapper[4710]: I1009 09:46:13.814797 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:46:13 crc kubenswrapper[4710]: E1009 09:46:13.817023 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:46:28 crc kubenswrapper[4710]: I1009 09:46:28.815896 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:46:28 crc kubenswrapper[4710]: E1009 09:46:28.816881 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:46:41 crc kubenswrapper[4710]: I1009 09:46:41.815563 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:46:42 crc 
kubenswrapper[4710]: I1009 09:46:42.144708 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef"} Oct 09 09:47:47 crc kubenswrapper[4710]: I1009 09:47:47.702671 4710 generic.go:334] "Generic (PLEG): container finished" podID="3197a42e-d565-4f24-9115-990a46dfc659" containerID="26ff87a479813e6c2c638b3d88200dff5a22cef2b25e5c85bb7039ae17344248" exitCode=0 Oct 09 09:47:47 crc kubenswrapper[4710]: I1009 09:47:47.702787 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" event={"ID":"3197a42e-d565-4f24-9115-990a46dfc659","Type":"ContainerDied","Data":"26ff87a479813e6c2c638b3d88200dff5a22cef2b25e5c85bb7039ae17344248"} Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.106076 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.211939 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5th2\" (UniqueName: \"kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.212280 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.212402 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.212513 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.212548 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.212674 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle\") pod \"3197a42e-d565-4f24-9115-990a46dfc659\" (UID: \"3197a42e-d565-4f24-9115-990a46dfc659\") " Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.219734 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph" (OuterVolumeSpecName: "ceph") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). 
InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.219863 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.220451 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2" (OuterVolumeSpecName: "kube-api-access-c5th2") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). InnerVolumeSpecName "kube-api-access-c5th2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.236118 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.238617 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.247941 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory" (OuterVolumeSpecName: "inventory") pod "3197a42e-d565-4f24-9115-990a46dfc659" (UID: "3197a42e-d565-4f24-9115-990a46dfc659"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.315570 4710 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.315993 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5th2\" (UniqueName: \"kubernetes.io/projected/3197a42e-d565-4f24-9115-990a46dfc659-kube-api-access-c5th2\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.316005 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.316017 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.316027 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.316038 4710 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/3197a42e-d565-4f24-9115-990a46dfc659-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.723098 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" event={"ID":"3197a42e-d565-4f24-9115-990a46dfc659","Type":"ContainerDied","Data":"f3ea780b4ca87ed37d315c4d0b3a83344ebafcf8d75506daef9f989d134f39e3"} Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.723143 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3ea780b4ca87ed37d315c4d0b3a83344ebafcf8d75506daef9f989d134f39e3" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.723234 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.838067 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v"] Oct 09 09:47:49 crc kubenswrapper[4710]: E1009 09:47:49.838685 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d79e7f1-2acc-4df9-be70-8775773f4f54" containerName="collect-profiles" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.838713 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d79e7f1-2acc-4df9-be70-8775773f4f54" containerName="collect-profiles" Oct 09 09:47:49 crc kubenswrapper[4710]: E1009 09:47:49.838748 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3197a42e-d565-4f24-9115-990a46dfc659" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.838756 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3197a42e-d565-4f24-9115-990a46dfc659" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.838969 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d79e7f1-2acc-4df9-be70-8775773f4f54" containerName="collect-profiles" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.838997 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3197a42e-d565-4f24-9115-990a46dfc659" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.842522 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.847615 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-xs6kb" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.847689 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.848561 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ceph-nova" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.848862 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.849068 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.849215 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.849481 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.849826 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.850140 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 09 09:47:49 crc kubenswrapper[4710]: I1009 09:47:49.854531 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v"] Oct 09 09:47:50 crc 
kubenswrapper[4710]: I1009 09:47:50.027610 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.027802 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.027844 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.027896 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.027962 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028020 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdcs\" (UniqueName: \"kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028093 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028134 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: 
\"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028281 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028331 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.028368 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131215 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131283 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131343 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131385 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 
crc kubenswrapper[4710]: I1009 09:47:50.131451 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdcs\" (UniqueName: \"kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131480 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131528 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131624 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131650 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131685 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.131782 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.133024 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: 
\"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.133656 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.137011 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.137741 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.137754 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.138482 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.138823 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.139022 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.141329 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1\") pod 
\"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.144055 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.149059 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdcs\" (UniqueName: \"kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs\") pod \"nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.160229 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.650180 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v"] Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.668404 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:47:50 crc kubenswrapper[4710]: I1009 09:47:50.730850 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" event={"ID":"f9e2c502-e067-49c7-b805-adc3d054f0cf","Type":"ContainerStarted","Data":"28a491a5cf182b5e5a6bfdaa8a9c5117f3dcf2202a2264f4fe573e7096c2f404"} Oct 09 09:47:51 crc kubenswrapper[4710]: I1009 09:47:51.740749 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" event={"ID":"f9e2c502-e067-49c7-b805-adc3d054f0cf","Type":"ContainerStarted","Data":"2d39036e2c266357741c6e47182850932c91a12bf07d964ee9d4a6aa31b5d86b"} Oct 09 09:47:51 crc kubenswrapper[4710]: I1009 09:47:51.768691 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" podStartSLOduration=2.038123791 podStartE2EDuration="2.76867042s" podCreationTimestamp="2025-10-09 09:47:49 +0000 UTC" firstStartedPulling="2025-10-09 09:47:50.668146504 +0000 UTC m=+2594.158254901" lastFinishedPulling="2025-10-09 09:47:51.398693133 +0000 UTC m=+2594.888801530" observedRunningTime="2025-10-09 09:47:51.763281395 +0000 UTC m=+2595.253389791" watchObservedRunningTime="2025-10-09 09:47:51.76867042 +0000 UTC m=+2595.258778808" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.525186 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.527320 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.547943 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.622064 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vkjh\" (UniqueName: \"kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.622108 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.622156 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.723268 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.723386 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vkjh\" (UniqueName: \"kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.723408 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.723866 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.724119 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.742707 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-5vkjh\" (UniqueName: \"kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh\") pod \"redhat-marketplace-7mf8t\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:26 crc kubenswrapper[4710]: I1009 09:48:26.842786 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:27 crc kubenswrapper[4710]: I1009 09:48:27.286192 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:28 crc kubenswrapper[4710]: I1009 09:48:28.055038 4710 generic.go:334] "Generic (PLEG): container finished" podID="547e5806-f6d5-463d-8d17-71a545f3147d" containerID="a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8" exitCode=0 Oct 09 09:48:28 crc kubenswrapper[4710]: I1009 09:48:28.055087 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerDied","Data":"a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8"} Oct 09 09:48:28 crc kubenswrapper[4710]: I1009 09:48:28.055360 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerStarted","Data":"b21239ed421a25b6f829ed0fe231f606ca669f4e76de2c2e8e7d3511fe76602a"} Oct 09 09:48:29 crc kubenswrapper[4710]: I1009 09:48:29.065012 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerStarted","Data":"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52"} Oct 09 09:48:30 crc kubenswrapper[4710]: I1009 09:48:30.077811 4710 generic.go:334] "Generic (PLEG): container finished" podID="547e5806-f6d5-463d-8d17-71a545f3147d" containerID="8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52" exitCode=0 Oct 09 09:48:30 crc kubenswrapper[4710]: I1009 09:48:30.077939 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerDied","Data":"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52"} Oct 09 09:48:31 crc kubenswrapper[4710]: I1009 09:48:31.091246 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerStarted","Data":"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83"} Oct 09 09:48:31 crc kubenswrapper[4710]: I1009 09:48:31.117585 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7mf8t" podStartSLOduration=2.496381422 podStartE2EDuration="5.117565184s" podCreationTimestamp="2025-10-09 09:48:26 +0000 UTC" firstStartedPulling="2025-10-09 09:48:28.057310144 +0000 UTC m=+2631.547418542" lastFinishedPulling="2025-10-09 09:48:30.678493907 +0000 UTC m=+2634.168602304" observedRunningTime="2025-10-09 09:48:31.110237813 +0000 UTC m=+2634.600346209" watchObservedRunningTime="2025-10-09 09:48:31.117565184 +0000 UTC m=+2634.607673581" Oct 09 09:48:36 crc kubenswrapper[4710]: I1009 09:48:36.842833 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:36 crc kubenswrapper[4710]: I1009 09:48:36.843253 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:36 crc kubenswrapper[4710]: I1009 09:48:36.878847 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:37 crc kubenswrapper[4710]: I1009 09:48:37.184766 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:37 crc kubenswrapper[4710]: I1009 09:48:37.756274 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.178942 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7mf8t" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="registry-server" containerID="cri-o://dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83" gracePeriod=2 Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.608643 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.724462 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities\") pod \"547e5806-f6d5-463d-8d17-71a545f3147d\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.724796 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vkjh\" (UniqueName: \"kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh\") pod \"547e5806-f6d5-463d-8d17-71a545f3147d\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.724914 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content\") pod \"547e5806-f6d5-463d-8d17-71a545f3147d\" (UID: \"547e5806-f6d5-463d-8d17-71a545f3147d\") " Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.725239 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities" (OuterVolumeSpecName: "utilities") pod "547e5806-f6d5-463d-8d17-71a545f3147d" (UID: "547e5806-f6d5-463d-8d17-71a545f3147d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.725616 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.729178 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh" (OuterVolumeSpecName: "kube-api-access-5vkjh") pod "547e5806-f6d5-463d-8d17-71a545f3147d" (UID: "547e5806-f6d5-463d-8d17-71a545f3147d"). InnerVolumeSpecName "kube-api-access-5vkjh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.748030 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "547e5806-f6d5-463d-8d17-71a545f3147d" (UID: "547e5806-f6d5-463d-8d17-71a545f3147d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.826868 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vkjh\" (UniqueName: \"kubernetes.io/projected/547e5806-f6d5-463d-8d17-71a545f3147d-kube-api-access-5vkjh\") on node \"crc\" DevicePath \"\"" Oct 09 09:48:39 crc kubenswrapper[4710]: I1009 09:48:39.827052 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/547e5806-f6d5-463d-8d17-71a545f3147d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.186319 4710 generic.go:334] "Generic (PLEG): container finished" podID="547e5806-f6d5-463d-8d17-71a545f3147d" containerID="dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83" exitCode=0 Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.186521 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerDied","Data":"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83"} Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.186618 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7mf8t" event={"ID":"547e5806-f6d5-463d-8d17-71a545f3147d","Type":"ContainerDied","Data":"b21239ed421a25b6f829ed0fe231f606ca669f4e76de2c2e8e7d3511fe76602a"} Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.186637 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7mf8t" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.186643 4710 scope.go:117] "RemoveContainer" containerID="dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.214034 4710 scope.go:117] "RemoveContainer" containerID="8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.216945 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.224174 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7mf8t"] Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.229418 4710 scope.go:117] "RemoveContainer" containerID="a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.263253 4710 scope.go:117] "RemoveContainer" containerID="dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83" Oct 09 09:48:40 crc kubenswrapper[4710]: E1009 09:48:40.263732 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83\": container with ID starting with dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83 not found: ID does not exist" containerID="dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.263781 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83"} err="failed to get container status \"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83\": rpc error: code = NotFound desc = could not find container \"dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83\": container with ID starting with dcab47e50eb340b34c02a34e85b7ce92a069890fda082730a6ba18e7b6517e83 not found: ID does not exist" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.263811 4710 scope.go:117] "RemoveContainer" containerID="8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52" Oct 09 09:48:40 crc kubenswrapper[4710]: E1009 09:48:40.264224 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52\": container with ID starting with 8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52 not found: ID does not exist" containerID="8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.264257 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52"} err="failed to get container status \"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52\": rpc error: code = NotFound desc = could not find container \"8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52\": container with ID starting with 8cae88d512eab834aaff2f670f08706f7036389b200a8c97c1d85f06a23bcc52 not found: ID does not exist" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.264283 4710 scope.go:117] "RemoveContainer" 
containerID="a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8" Oct 09 09:48:40 crc kubenswrapper[4710]: E1009 09:48:40.264722 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8\": container with ID starting with a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8 not found: ID does not exist" containerID="a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.264750 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8"} err="failed to get container status \"a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8\": rpc error: code = NotFound desc = could not find container \"a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8\": container with ID starting with a86dce9aae0e57006eda52e7ddd6a9f77d60e52eb645daaccc91737a7705d0f8 not found: ID does not exist" Oct 09 09:48:40 crc kubenswrapper[4710]: I1009 09:48:40.826179 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" path="/var/lib/kubelet/pods/547e5806-f6d5-463d-8d17-71a545f3147d/volumes" Oct 09 09:49:05 crc kubenswrapper[4710]: I1009 09:49:05.545837 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:49:05 crc kubenswrapper[4710]: I1009 09:49:05.546572 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:49:35 crc kubenswrapper[4710]: I1009 09:49:35.545847 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:49:35 crc kubenswrapper[4710]: I1009 09:49:35.546627 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.085778 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:37 crc kubenswrapper[4710]: E1009 09:49:37.086407 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="extract-utilities" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.086422 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="extract-utilities" Oct 09 09:49:37 crc kubenswrapper[4710]: E1009 09:49:37.086457 4710 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="extract-content" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.086464 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="extract-content" Oct 09 09:49:37 crc kubenswrapper[4710]: E1009 09:49:37.086497 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="registry-server" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.086504 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="registry-server" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.086697 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="547e5806-f6d5-463d-8d17-71a545f3147d" containerName="registry-server" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.087962 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.104930 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.142335 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.142403 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc5fq\" (UniqueName: \"kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.142471 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.243813 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.243909 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.243959 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc5fq\" (UniqueName: 
\"kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.244268 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.244590 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.261199 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc5fq\" (UniqueName: \"kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq\") pod \"community-operators-49q59\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:37 crc kubenswrapper[4710]: I1009 09:49:37.406361 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:38 crc kubenswrapper[4710]: I1009 09:49:38.387211 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:38 crc kubenswrapper[4710]: I1009 09:49:38.748160 4710 generic.go:334] "Generic (PLEG): container finished" podID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerID="a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77" exitCode=0 Oct 09 09:49:38 crc kubenswrapper[4710]: I1009 09:49:38.748222 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerDied","Data":"a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77"} Oct 09 09:49:38 crc kubenswrapper[4710]: I1009 09:49:38.748263 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerStarted","Data":"76c1bca23b9c77a8bc9fb0cdf5bf9e70a08556beff12892d6ebc4d5da1400ad1"} Oct 09 09:49:40 crc kubenswrapper[4710]: I1009 09:49:40.768459 4710 generic.go:334] "Generic (PLEG): container finished" podID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerID="57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0" exitCode=0 Oct 09 09:49:40 crc kubenswrapper[4710]: I1009 09:49:40.768564 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerDied","Data":"57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0"} Oct 09 09:49:41 crc kubenswrapper[4710]: I1009 09:49:41.782159 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" 
event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerStarted","Data":"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264"} Oct 09 09:49:41 crc kubenswrapper[4710]: I1009 09:49:41.805902 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-49q59" podStartSLOduration=2.021915043 podStartE2EDuration="4.805882852s" podCreationTimestamp="2025-10-09 09:49:37 +0000 UTC" firstStartedPulling="2025-10-09 09:49:38.750293122 +0000 UTC m=+2702.240401518" lastFinishedPulling="2025-10-09 09:49:41.53426093 +0000 UTC m=+2705.024369327" observedRunningTime="2025-10-09 09:49:41.802596102 +0000 UTC m=+2705.292704499" watchObservedRunningTime="2025-10-09 09:49:41.805882852 +0000 UTC m=+2705.295991250" Oct 09 09:49:47 crc kubenswrapper[4710]: I1009 09:49:47.407605 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:47 crc kubenswrapper[4710]: I1009 09:49:47.408902 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:47 crc kubenswrapper[4710]: I1009 09:49:47.450526 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:47 crc kubenswrapper[4710]: I1009 09:49:47.871142 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:47 crc kubenswrapper[4710]: I1009 09:49:47.927448 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:49 crc kubenswrapper[4710]: I1009 09:49:49.849063 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-49q59" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="registry-server" containerID="cri-o://6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264" gracePeriod=2 Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.341806 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.401877 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content\") pod \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.401936 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities\") pod \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.402074 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc5fq\" (UniqueName: \"kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq\") pod \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\" (UID: \"aa59c635-a0dc-41f2-822f-fe54a0b4292d\") " Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.402872 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities" (OuterVolumeSpecName: "utilities") pod "aa59c635-a0dc-41f2-822f-fe54a0b4292d" (UID: "aa59c635-a0dc-41f2-822f-fe54a0b4292d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.404189 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.408805 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq" (OuterVolumeSpecName: "kube-api-access-gc5fq") pod "aa59c635-a0dc-41f2-822f-fe54a0b4292d" (UID: "aa59c635-a0dc-41f2-822f-fe54a0b4292d"). InnerVolumeSpecName "kube-api-access-gc5fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.445482 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aa59c635-a0dc-41f2-822f-fe54a0b4292d" (UID: "aa59c635-a0dc-41f2-822f-fe54a0b4292d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.508272 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aa59c635-a0dc-41f2-822f-fe54a0b4292d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.508317 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc5fq\" (UniqueName: \"kubernetes.io/projected/aa59c635-a0dc-41f2-822f-fe54a0b4292d-kube-api-access-gc5fq\") on node \"crc\" DevicePath \"\"" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.865140 4710 generic.go:334] "Generic (PLEG): container finished" podID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerID="6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264" exitCode=0 Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.865196 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerDied","Data":"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264"} Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.865236 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-49q59" event={"ID":"aa59c635-a0dc-41f2-822f-fe54a0b4292d","Type":"ContainerDied","Data":"76c1bca23b9c77a8bc9fb0cdf5bf9e70a08556beff12892d6ebc4d5da1400ad1"} Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.865256 4710 scope.go:117] "RemoveContainer" containerID="6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.865410 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-49q59" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.901530 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.907129 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-49q59"] Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.909785 4710 scope.go:117] "RemoveContainer" containerID="57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.942556 4710 scope.go:117] "RemoveContainer" containerID="a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.964058 4710 scope.go:117] "RemoveContainer" containerID="6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264" Oct 09 09:49:50 crc kubenswrapper[4710]: E1009 09:49:50.968007 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264\": container with ID starting with 6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264 not found: ID does not exist" containerID="6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.968044 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264"} err="failed to get container status \"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264\": rpc error: code = NotFound desc = could not find container \"6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264\": container with ID starting with 6e15b47e1026fa80ee4e740d70036ac584f89d41474f1552b14ec33702be0264 not found: ID does not exist" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.968070 4710 scope.go:117] "RemoveContainer" containerID="57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0" Oct 09 09:49:50 crc kubenswrapper[4710]: E1009 09:49:50.968452 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0\": container with ID starting with 57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0 not found: ID does not exist" containerID="57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.968494 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0"} err="failed to get container status \"57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0\": rpc error: code = NotFound desc = could not find container \"57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0\": container with ID starting with 57eb81774861834cbe820f99897b4c3f452bfab98d0d37c104afea8eccefeaa0 not found: ID does not exist" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.968524 4710 scope.go:117] "RemoveContainer" containerID="a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77" Oct 09 09:49:50 crc kubenswrapper[4710]: E1009 09:49:50.969039 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77\": container with ID starting with a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77 not found: ID does not exist" containerID="a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77" Oct 09 09:49:50 crc kubenswrapper[4710]: I1009 09:49:50.969057 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77"} err="failed to get container status \"a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77\": rpc error: code = NotFound desc = could not find container \"a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77\": container with ID starting with a2542a93c920a9db3819c0e1386c4b06fe1c56da67e51a316842d7993532cb77 not found: ID does not exist" Oct 09 09:49:52 crc kubenswrapper[4710]: I1009 09:49:52.824581 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" path="/var/lib/kubelet/pods/aa59c635-a0dc-41f2-822f-fe54a0b4292d/volumes" Oct 09 09:50:05 crc kubenswrapper[4710]: I1009 09:50:05.546866 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:50:05 crc kubenswrapper[4710]: I1009 09:50:05.547892 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:50:05 crc kubenswrapper[4710]: I1009 09:50:05.547950 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:50:05 crc kubenswrapper[4710]: I1009 09:50:05.548699 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:50:05 crc kubenswrapper[4710]: I1009 09:50:05.548765 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef" gracePeriod=600 Oct 09 09:50:06 crc kubenswrapper[4710]: I1009 09:50:06.000898 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef" exitCode=0 Oct 09 09:50:06 crc kubenswrapper[4710]: I1009 09:50:06.000989 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" 
event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef"} Oct 09 09:50:06 crc kubenswrapper[4710]: I1009 09:50:06.001376 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51"} Oct 09 09:50:06 crc kubenswrapper[4710]: I1009 09:50:06.001417 4710 scope.go:117] "RemoveContainer" containerID="ade196b564fb80a4a3835fa474e404c0e5c30758df108d5bbe51ba0074c577f2" Oct 09 09:50:46 crc kubenswrapper[4710]: I1009 09:50:46.400225 4710 generic.go:334] "Generic (PLEG): container finished" podID="f9e2c502-e067-49c7-b805-adc3d054f0cf" containerID="2d39036e2c266357741c6e47182850932c91a12bf07d964ee9d4a6aa31b5d86b" exitCode=0 Oct 09 09:50:46 crc kubenswrapper[4710]: I1009 09:50:46.400860 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" event={"ID":"f9e2c502-e067-49c7-b805-adc3d054f0cf","Type":"ContainerDied","Data":"2d39036e2c266357741c6e47182850932c91a12bf07d964ee9d4a6aa31b5d86b"} Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.852921 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.899487 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.900042 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.900093 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.907764 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph" (OuterVolumeSpecName: "ceph") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.929218 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory" (OuterVolumeSpecName: "inventory") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:47 crc kubenswrapper[4710]: I1009 09:50:47.935527 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001656 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001718 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001764 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001784 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvdcs\" (UniqueName: \"kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001868 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001885 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001919 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.001954 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0\") pod \"f9e2c502-e067-49c7-b805-adc3d054f0cf\" (UID: \"f9e2c502-e067-49c7-b805-adc3d054f0cf\") " Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.002203 4710 reconciler_common.go:293] 
"Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.002255 4710 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-inventory\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.002265 4710 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.008814 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs" (OuterVolumeSpecName: "kube-api-access-jvdcs") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "kube-api-access-jvdcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.011911 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle" (OuterVolumeSpecName: "nova-custom-ceph-combined-ca-bundle") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-custom-ceph-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.029486 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.031471 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0" (OuterVolumeSpecName: "ceph-nova-0") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "ceph-nova-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.041733 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.043505 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.044655 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.049915 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f9e2c502-e067-49c7-b805-adc3d054f0cf" (UID: "f9e2c502-e067-49c7-b805-adc3d054f0cf"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104450 4710 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104482 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104494 4710 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104503 4710 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104514 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvdcs\" (UniqueName: \"kubernetes.io/projected/f9e2c502-e067-49c7-b805-adc3d054f0cf-kube-api-access-jvdcs\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104525 4710 reconciler_common.go:293] "Volume detached for volume \"ceph-nova-0\" (UniqueName: \"kubernetes.io/configmap/f9e2c502-e067-49c7-b805-adc3d054f0cf-ceph-nova-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104535 4710 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.104544 4710 reconciler_common.go:293] "Volume detached for volume \"nova-custom-ceph-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9e2c502-e067-49c7-b805-adc3d054f0cf-nova-custom-ceph-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.420341 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" event={"ID":"f9e2c502-e067-49c7-b805-adc3d054f0cf","Type":"ContainerDied","Data":"28a491a5cf182b5e5a6bfdaa8a9c5117f3dcf2202a2264f4fe573e7096c2f404"} Oct 09 09:50:48 crc kubenswrapper[4710]: 
I1009 09:50:48.420636 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28a491a5cf182b5e5a6bfdaa8a9c5117f3dcf2202a2264f4fe573e7096c2f404" Oct 09 09:50:48 crc kubenswrapper[4710]: I1009 09:50:48.420388 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.182056 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:50:57 crc kubenswrapper[4710]: E1009 09:50:57.182819 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9e2c502-e067-49c7-b805-adc3d054f0cf" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.182831 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9e2c502-e067-49c7-b805-adc3d054f0cf" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 09 09:50:57 crc kubenswrapper[4710]: E1009 09:50:57.182856 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="registry-server" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.182862 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="registry-server" Oct 09 09:50:57 crc kubenswrapper[4710]: E1009 09:50:57.182878 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="extract-content" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.182883 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="extract-content" Oct 09 09:50:57 crc kubenswrapper[4710]: E1009 09:50:57.182893 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="extract-utilities" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.182899 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="extract-utilities" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.183056 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9e2c502-e067-49c7-b805-adc3d054f0cf" containerName="nova-custom-ceph-edpm-deployment-openstack-edpm-ipam" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.183080 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa59c635-a0dc-41f2-822f-fe54a0b4292d" containerName="registry-server" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.184220 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.192881 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.277144 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w6kn\" (UniqueName: \"kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.277421 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.277513 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.379560 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.379643 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.379711 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w6kn\" (UniqueName: \"kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.380024 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.380159 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.396473 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6w6kn\" (UniqueName: \"kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn\") pod \"redhat-operators-2qs8n\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.501556 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:50:57 crc kubenswrapper[4710]: I1009 09:50:57.932057 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:50:58 crc kubenswrapper[4710]: I1009 09:50:58.509901 4710 generic.go:334] "Generic (PLEG): container finished" podID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerID="aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202" exitCode=0 Oct 09 09:50:58 crc kubenswrapper[4710]: I1009 09:50:58.510027 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerDied","Data":"aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202"} Oct 09 09:50:58 crc kubenswrapper[4710]: I1009 09:50:58.510285 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerStarted","Data":"389b6f1051b0af26b6c23671c31e31f8090fb9be90df73f14664c2f4e8305fbf"} Oct 09 09:50:59 crc kubenswrapper[4710]: I1009 09:50:59.535317 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerStarted","Data":"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf"} Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.375976 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.378226 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.380027 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.380103 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.392610 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.479052 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.481024 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.485053 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.488799 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548165 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548203 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548232 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548248 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548279 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548322 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9bvg\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-kube-api-access-t9bvg\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548337 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548372 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 
09:51:00.548388 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548415 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548471 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548499 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548521 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548542 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548593 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.548621 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-run\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.649977 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650016 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650036 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650057 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650085 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-nvme\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650103 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650129 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data-custom\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650149 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650165 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650183 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-scripts\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650197 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: 
\"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-ceph\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650212 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-lib-modules\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650225 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-run\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650246 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650261 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650278 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650292 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650307 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650324 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650345 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 
09:51:00.650363 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpm6d\" (UniqueName: \"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-kube-api-access-vpm6d\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650388 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-run\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650405 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650421 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650451 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9bvg\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-kube-api-access-t9bvg\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650466 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650481 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-sys\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650501 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-dev\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650520 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650536 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650557 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.650574 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651140 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-sys\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651409 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651507 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-run\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651553 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-dev\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651581 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651601 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651696 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.651795 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.652261 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.654010 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/9c352150-914d-40e6-8eb2-ecbf97b33bbc-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.657255 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.657695 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.662562 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.667769 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c352150-914d-40e6-8eb2-ecbf97b33bbc-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.676739 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.685409 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9bvg\" (UniqueName: \"kubernetes.io/projected/9c352150-914d-40e6-8eb2-ecbf97b33bbc-kube-api-access-t9bvg\") pod \"cinder-volume-volume1-0\" (UID: \"9c352150-914d-40e6-8eb2-ecbf97b33bbc\") " pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.692003 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.752903 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753217 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-nvme\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753242 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753089 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753270 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data-custom\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753401 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753463 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-scripts\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753483 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-ceph\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753508 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-lib-modules\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753557 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-combined-ca-bundle\") pod 
\"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753624 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753677 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpm6d\" (UniqueName: \"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-kube-api-access-vpm6d\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753727 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-run\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753757 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753781 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753812 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-sys\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.753862 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-dev\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754015 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-dev\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754068 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-nvme\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754141 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-lib-cinder\") pod \"cinder-backup-0\" (UID: 
\"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754172 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754206 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.754589 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-run\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.755619 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.755703 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-lib-modules\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.758464 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data-custom\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.758528 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/773237c7-043e-4e8c-a646-6a24ab6cf3d5-sys\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.758889 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-scripts\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.761529 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-ceph\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.762477 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-config-data\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc 
kubenswrapper[4710]: I1009 09:51:00.776633 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/773237c7-043e-4e8c-a646-6a24ab6cf3d5-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.785971 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpm6d\" (UniqueName: \"kubernetes.io/projected/773237c7-043e-4e8c-a646-6a24ab6cf3d5-kube-api-access-vpm6d\") pod \"cinder-backup-0\" (UID: \"773237c7-043e-4e8c-a646-6a24ab6cf3d5\") " pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.822278 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.902500 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-hwcfw"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.903875 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.918164 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-hwcfw"] Oct 09 09:51:00 crc kubenswrapper[4710]: I1009 09:51:00.978749 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzrfs\" (UniqueName: \"kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs\") pod \"manila-db-create-hwcfw\" (UID: \"b5e73400-7814-42e4-879a-4193792ce918\") " pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.001527 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.003712 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.007992 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-jqn6s" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.007999 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.008139 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.008414 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.052542 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082665 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082782 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgk9r\" (UniqueName: \"kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082811 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082879 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082919 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.082942 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzrfs\" (UniqueName: \"kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs\") pod \"manila-db-create-hwcfw\" (UID: \"b5e73400-7814-42e4-879a-4193792ce918\") " pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.103831 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzrfs\" (UniqueName: \"kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs\") pod \"manila-db-create-hwcfw\" (UID: 
\"b5e73400-7814-42e4-879a-4193792ce918\") " pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.121342 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.123648 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.128930 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.129098 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-gcdnv" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.129251 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.129682 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.148214 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.181357 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.182935 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.184622 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.184799 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgk9r\" (UniqueName: \"kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.184838 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.184884 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.184945 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 
09:51:01.185642 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.186584 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.186691 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.191112 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.213910 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgk9r\" (UniqueName: \"kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r\") pod \"horizon-55df4dcbd5-bmlgb\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.222419 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.235117 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.287483 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.289064 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.291564 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.294939 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.296257 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298623 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298709 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298740 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298788 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298811 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298883 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298931 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.298999 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.299030 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txxd4\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.340687 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.400840 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.401248 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.401378 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.401481 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.402201 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.402305 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.402397 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhgsq\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq\") pod \"glance-default-external-api-0\" (UID: 
\"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.402841 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.422101 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.433565 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txxd4\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.433746 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.436881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wkfb\" (UniqueName: \"kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.436970 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437005 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437029 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437090 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph\") pod 
\"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437143 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437167 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437185 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437225 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437274 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437311 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437340 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437401 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.437469 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " 
pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.441060 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.441304 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.448254 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txxd4\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.449112 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.449779 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.452347 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.456032 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.466341 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.488268 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539638 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhgsq\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " 
pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539695 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539720 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wkfb\" (UniqueName: \"kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539752 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539772 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539798 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539821 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539842 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539879 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539913 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539953 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539980 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.539996 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.540013 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.541544 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.542341 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.542445 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.542928 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.543186 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.547010 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data\") pod 
\"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.547760 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.552739 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.553063 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.553985 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.557130 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.562276 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.563013 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhgsq\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.564412 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9c352150-914d-40e6-8eb2-ecbf97b33bbc","Type":"ContainerStarted","Data":"06b17402977ca338417375fabeedd856f4b9ea4ab4b51e37b0c48ce5052c8234"} Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.565981 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wkfb\" (UniqueName: \"kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb\") pod \"horizon-598b695687-fxzw5\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.581016 4710 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.630812 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.670150 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Oct 09 09:51:01 crc kubenswrapper[4710]: W1009 09:51:01.692003 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod773237c7_043e_4e8c_a646_6a24ab6cf3d5.slice/crio-b5a02812415dc669436ca9b627351feb67e178b3013897d1158710d00ffb741d WatchSource:0}: Error finding container b5a02812415dc669436ca9b627351feb67e178b3013897d1158710d00ffb741d: Status 404 returned error can't find the container with id b5a02812415dc669436ca9b627351feb67e178b3013897d1158710d00ffb741d Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.747037 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.798996 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.853094 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-hwcfw"] Oct 09 09:51:01 crc kubenswrapper[4710]: I1009 09:51:01.948251 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.334458 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.451705 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:02 crc kubenswrapper[4710]: W1009 09:51:02.454139 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e6c3490_3e7a_4eb2_95a1_6a6dd5e49172.slice/crio-c2c963a2e4f198b506b469879890a91fe3bd149fd49815adbc0013e069af452f WatchSource:0}: Error finding container c2c963a2e4f198b506b469879890a91fe3bd149fd49815adbc0013e069af452f: Status 404 returned error can't find the container with id c2c963a2e4f198b506b469879890a91fe3bd149fd49815adbc0013e069af452f Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.483420 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:02 crc kubenswrapper[4710]: W1009 09:51:02.511303 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5643934e_9c27_457b_b0ad_5c8d5a635d26.slice/crio-18067ecd7f01c76c0c915baf416effdd48e6899eecb76ac8b60a6415f1981cfb WatchSource:0}: Error finding container 18067ecd7f01c76c0c915baf416effdd48e6899eecb76ac8b60a6415f1981cfb: Status 404 returned error can't find the container with id 18067ecd7f01c76c0c915baf416effdd48e6899eecb76ac8b60a6415f1981cfb Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.583056 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerStarted","Data":"ed027f9f95d1c34c739578a40c990e2039366f1d5f89543aeb54eebe56c07fba"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.587595 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerStarted","Data":"18067ecd7f01c76c0c915baf416effdd48e6899eecb76ac8b60a6415f1981cfb"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.589540 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerStarted","Data":"c2c963a2e4f198b506b469879890a91fe3bd149fd49815adbc0013e069af452f"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.591935 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"773237c7-043e-4e8c-a646-6a24ab6cf3d5","Type":"ContainerStarted","Data":"b5a02812415dc669436ca9b627351feb67e178b3013897d1158710d00ffb741d"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.594302 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerStarted","Data":"d1181cf5ab7a7be3b2567ef506c1825cc7e429ebf704d44a6b26f00d2aea878b"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.599162 4710 generic.go:334] "Generic (PLEG): container finished" podID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerID="3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf" exitCode=0 Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.599232 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerDied","Data":"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.602915 4710 generic.go:334] "Generic (PLEG): container finished" podID="b5e73400-7814-42e4-879a-4193792ce918" containerID="559f6b680076735a0530b5807453e2c706113d1d831e465ae6fe57c8b08bedfe" exitCode=0 Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.603084 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-hwcfw" event={"ID":"b5e73400-7814-42e4-879a-4193792ce918","Type":"ContainerDied","Data":"559f6b680076735a0530b5807453e2c706113d1d831e465ae6fe57c8b08bedfe"} Oct 09 09:51:02 crc kubenswrapper[4710]: I1009 09:51:02.603101 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-hwcfw" event={"ID":"b5e73400-7814-42e4-879a-4193792ce918","Type":"ContainerStarted","Data":"a6e819c7a0006176b1a78263215786e6ae800339fd917da39907ce02a07c9477"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.495078 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.535347 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.583210 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.611080 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:51:03 crc 
kubenswrapper[4710]: I1009 09:51:03.611115 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.611208 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.616817 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.655003 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.718132 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-67d94d7dc8-fvmp7"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.723163 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.731088 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67d94d7dc8-fvmp7"] Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.755593 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"773237c7-043e-4e8c-a646-6a24ab6cf3d5","Type":"ContainerStarted","Data":"2e1e4ba90430361dfaa6fdd4d16a39929a190cb99b226a7774677168af59a653"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763241 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763293 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763339 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763410 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763451 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzhcs\" (UniqueName: \"kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763475 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.763494 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.800606 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerStarted","Data":"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.810661 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerStarted","Data":"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.822500 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9c352150-914d-40e6-8eb2-ecbf97b33bbc","Type":"ContainerStarted","Data":"f5de0dcc6e04142854f511a8fb42376af76b94ced499d6154be068dee5a1a66b"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.833372 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerStarted","Data":"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771"} Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.846272 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2qs8n" podStartSLOduration=2.28237557 podStartE2EDuration="6.846262617s" podCreationTimestamp="2025-10-09 09:50:57 +0000 UTC" firstStartedPulling="2025-10-09 09:50:58.511642238 +0000 UTC m=+2782.001750634" lastFinishedPulling="2025-10-09 09:51:03.075529285 +0000 UTC m=+2786.565637681" observedRunningTime="2025-10-09 09:51:03.845634001 +0000 UTC m=+2787.335742399" watchObservedRunningTime="2025-10-09 09:51:03.846262617 +0000 UTC m=+2787.336371014" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.864811 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsx4f\" (UniqueName: \"kubernetes.io/projected/13eb4841-8d3a-4ef6-a2da-656bab482ab4-kube-api-access-gsx4f\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.864896 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-secret-key\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.864926 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.864952 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-combined-ca-bundle\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.864994 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865017 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13eb4841-8d3a-4ef6-a2da-656bab482ab4-logs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865040 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865092 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865125 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzhcs\" (UniqueName: \"kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865144 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865163 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865267 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-scripts\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: 
\"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865303 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-tls-certs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.865317 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-config-data\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.866170 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.866949 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.868576 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.942886 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.963075 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.963534 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969301 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-scripts\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969344 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-config-data\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969363 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-tls-certs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969393 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsx4f\" (UniqueName: \"kubernetes.io/projected/13eb4841-8d3a-4ef6-a2da-656bab482ab4-kube-api-access-gsx4f\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969456 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-secret-key\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969489 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-combined-ca-bundle\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969534 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13eb4841-8d3a-4ef6-a2da-656bab482ab4-logs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.969930 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13eb4841-8d3a-4ef6-a2da-656bab482ab4-logs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.970388 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-scripts\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:03 crc kubenswrapper[4710]: I1009 09:51:03.977918 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/13eb4841-8d3a-4ef6-a2da-656bab482ab4-config-data\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:03.999701 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-secret-key\") pod 
\"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.001963 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-horizon-tls-certs\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.025073 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzhcs\" (UniqueName: \"kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs\") pod \"horizon-dd45d474-nc99p\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.026897 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13eb4841-8d3a-4ef6-a2da-656bab482ab4-combined-ca-bundle\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.044711 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsx4f\" (UniqueName: \"kubernetes.io/projected/13eb4841-8d3a-4ef6-a2da-656bab482ab4-kube-api-access-gsx4f\") pod \"horizon-67d94d7dc8-fvmp7\" (UID: \"13eb4841-8d3a-4ef6-a2da-656bab482ab4\") " pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.277698 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.338252 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.768549 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.881014 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"9c352150-914d-40e6-8eb2-ecbf97b33bbc","Type":"ContainerStarted","Data":"7dbe0a1ba56549d510b74d7e63abd29a394b90f61932e0248bc17409f144138f"} Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.910649 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzrfs\" (UniqueName: \"kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs\") pod \"b5e73400-7814-42e4-879a-4193792ce918\" (UID: \"b5e73400-7814-42e4-879a-4193792ce918\") " Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.913622 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerStarted","Data":"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45"} Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.913762 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-log" containerID="cri-o://ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" gracePeriod=30 Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.914228 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-httpd" containerID="cri-o://e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" gracePeriod=30 Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.920280 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"773237c7-043e-4e8c-a646-6a24ab6cf3d5","Type":"ContainerStarted","Data":"f5c3ff8a45a6f40ef64b23df80a9757e8dab29f25254750d13cc1fb30588b3b1"} Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.928351 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs" (OuterVolumeSpecName: "kube-api-access-mzrfs") pod "b5e73400-7814-42e4-879a-4193792ce918" (UID: "b5e73400-7814-42e4-879a-4193792ce918"). InnerVolumeSpecName "kube-api-access-mzrfs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.937044 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.414038164 podStartE2EDuration="4.937023167s" podCreationTimestamp="2025-10-09 09:51:00 +0000 UTC" firstStartedPulling="2025-10-09 09:51:01.49668704 +0000 UTC m=+2784.986795437" lastFinishedPulling="2025-10-09 09:51:03.019672043 +0000 UTC m=+2786.509780440" observedRunningTime="2025-10-09 09:51:04.910084262 +0000 UTC m=+2788.400192660" watchObservedRunningTime="2025-10-09 09:51:04.937023167 +0000 UTC m=+2788.427131563" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.946644 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerStarted","Data":"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3"} Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.946939 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-log" containerID="cri-o://1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" gracePeriod=30 Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.947038 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-httpd" containerID="cri-o://9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" gracePeriod=30 Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.976926 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.97690787 podStartE2EDuration="3.97690787s" podCreationTimestamp="2025-10-09 09:51:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:04.953198136 +0000 UTC m=+2788.443306534" watchObservedRunningTime="2025-10-09 09:51:04.97690787 +0000 UTC m=+2788.467016267" Oct 09 09:51:04 crc kubenswrapper[4710]: I1009 09:51:04.989914 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.634517527 podStartE2EDuration="4.989902651s" podCreationTimestamp="2025-10-09 09:51:00 +0000 UTC" firstStartedPulling="2025-10-09 09:51:01.694615267 +0000 UTC m=+2785.184723663" lastFinishedPulling="2025-10-09 09:51:03.05000039 +0000 UTC m=+2786.540108787" observedRunningTime="2025-10-09 09:51:04.984651697 +0000 UTC m=+2788.474760094" watchObservedRunningTime="2025-10-09 09:51:04.989902651 +0000 UTC m=+2788.480011049" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:04.998496 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-hwcfw" event={"ID":"b5e73400-7814-42e4-879a-4193792ce918","Type":"ContainerDied","Data":"a6e819c7a0006176b1a78263215786e6ae800339fd917da39907ce02a07c9477"} Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:04.998544 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6e819c7a0006176b1a78263215786e6ae800339fd917da39907ce02a07c9477" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:04.998653 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-hwcfw" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.014005 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzrfs\" (UniqueName: \"kubernetes.io/projected/b5e73400-7814-42e4-879a-4193792ce918-kube-api-access-mzrfs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.019975 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.019966039 podStartE2EDuration="4.019966039s" podCreationTimestamp="2025-10-09 09:51:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:05.009816194 +0000 UTC m=+2788.499924591" watchObservedRunningTime="2025-10-09 09:51:05.019966039 +0000 UTC m=+2788.510074436" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.133325 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:51:05 crc kubenswrapper[4710]: W1009 09:51:05.172180 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc574f93a_f78c_43c0_a159_797171f39d0b.slice/crio-2259e5a0358432dc8d51e0dbf4dfc963f3c923b648aa863b1b276710a285e3b1 WatchSource:0}: Error finding container 2259e5a0358432dc8d51e0dbf4dfc963f3c923b648aa863b1b276710a285e3b1: Status 404 returned error can't find the container with id 2259e5a0358432dc8d51e0dbf4dfc963f3c923b648aa863b1b276710a285e3b1 Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.378652 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67d94d7dc8-fvmp7"] Oct 09 09:51:05 crc kubenswrapper[4710]: W1009 09:51:05.385668 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13eb4841_8d3a_4ef6_a2da_656bab482ab4.slice/crio-eee485937c2eea34119a42c1e6978a0ba4998a96e4a1e1b148faa04d75d07eb5 WatchSource:0}: Error finding container eee485937c2eea34119a42c1e6978a0ba4998a96e4a1e1b148faa04d75d07eb5: Status 404 returned error can't find the container with id eee485937c2eea34119a42c1e6978a0ba4998a96e4a1e1b148faa04d75d07eb5 Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.595682 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630310 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630378 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txxd4\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630412 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630500 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630670 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630687 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630770 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630833 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.630908 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data\") pod \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\" (UID: \"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172\") " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.634505 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs" (OuterVolumeSpecName: "logs") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.634883 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.658604 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph" (OuterVolumeSpecName: "ceph") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.658789 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts" (OuterVolumeSpecName: "scripts") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.670495 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4" (OuterVolumeSpecName: "kube-api-access-txxd4") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "kube-api-access-txxd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.697042 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.700568 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.718802 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.724637 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734040 4710 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734068 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734077 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734087 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txxd4\" (UniqueName: \"kubernetes.io/projected/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-kube-api-access-txxd4\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734109 4710 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734118 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734127 4710 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.734135 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.741547 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data" (OuterVolumeSpecName: "config-data") pod "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" (UID: "1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.795482 4710 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.822911 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.835877 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.835905 4710 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:05 crc kubenswrapper[4710]: I1009 09:51:05.973753 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.024333 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67d94d7dc8-fvmp7" event={"ID":"13eb4841-8d3a-4ef6-a2da-656bab482ab4","Type":"ContainerStarted","Data":"eee485937c2eea34119a42c1e6978a0ba4998a96e4a1e1b148faa04d75d07eb5"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.035870 4710 generic.go:334] "Generic (PLEG): container finished" podID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerID="e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" exitCode=143 Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.035901 4710 generic.go:334] "Generic (PLEG): container finished" podID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerID="ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" exitCode=143 Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.035955 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerDied","Data":"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.035981 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerDied","Data":"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.035992 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172","Type":"ContainerDied","Data":"c2c963a2e4f198b506b469879890a91fe3bd149fd49815adbc0013e069af452f"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.036014 4710 scope.go:117] "RemoveContainer" containerID="e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.036183 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.038967 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerStarted","Data":"2259e5a0358432dc8d51e0dbf4dfc963f3c923b648aa863b1b276710a285e3b1"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.041835 4710 generic.go:334] "Generic (PLEG): container finished" podID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerID="9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" exitCode=143 Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.041874 4710 generic.go:334] "Generic (PLEG): container finished" podID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerID="1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" exitCode=143 Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.041901 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.041962 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerDied","Data":"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.041990 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerDied","Data":"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042000 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7480b380-bd58-49cc-90f3-ceb592465c3c","Type":"ContainerDied","Data":"d1181cf5ab7a7be3b2567ef506c1825cc7e429ebf704d44a6b26f00d2aea878b"} Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042515 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042661 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042696 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042780 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042924 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.042980 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhgsq\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.043007 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.043025 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.043227 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph\") pod \"7480b380-bd58-49cc-90f3-ceb592465c3c\" (UID: \"7480b380-bd58-49cc-90f3-ceb592465c3c\") " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.048757 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.049036 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs" (OuterVolumeSpecName: "logs") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.049171 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.061693 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts" (OuterVolumeSpecName: "scripts") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.061794 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph" (OuterVolumeSpecName: "ceph") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.074555 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq" (OuterVolumeSpecName: "kube-api-access-vhgsq") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "kube-api-access-vhgsq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.074669 4710 scope.go:117] "RemoveContainer" containerID="ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.093878 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.098192 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.156375 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.156403 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhgsq\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-kube-api-access-vhgsq\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.156416 4710 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7480b380-bd58-49cc-90f3-ceb592465c3c-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.157534 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.160932 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7480b380-bd58-49cc-90f3-ceb592465c3c-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.160976 4710 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.160986 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.202892 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.203557 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-log" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203575 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-log" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.203617 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-log" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203623 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-log" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.203630 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203635 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.203650 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e73400-7814-42e4-879a-4193792ce918" containerName="mariadb-database-create" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203655 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e73400-7814-42e4-879a-4193792ce918" containerName="mariadb-database-create" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.203696 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203702 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203950 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203967 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-httpd" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.203978 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" containerName="glance-log" Oct 09 09:51:06 crc 
kubenswrapper[4710]: I1009 09:51:06.204011 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e73400-7814-42e4-879a-4193792ce918" containerName="mariadb-database-create" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.204019 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" containerName="glance-log" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.209776 4710 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.213240 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.217172 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.219189 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.222088 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data" (OuterVolumeSpecName: "config-data") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" (UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.222380 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.233623 4710 scope.go:117] "RemoveContainer" containerID="e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.239866 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45\": container with ID starting with e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45 not found: ID does not exist" containerID="e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.239892 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45"} err="failed to get container status \"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45\": rpc error: code = NotFound desc = could not find container \"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45\": container with ID starting with e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.239919 4710 scope.go:117] "RemoveContainer" containerID="ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.245219 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771\": container with ID starting with ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771 not found: ID does not exist" 
containerID="ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.245245 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771"} err="failed to get container status \"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771\": rpc error: code = NotFound desc = could not find container \"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771\": container with ID starting with ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.245262 4710 scope.go:117] "RemoveContainer" containerID="e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.255721 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45"} err="failed to get container status \"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45\": rpc error: code = NotFound desc = could not find container \"e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45\": container with ID starting with e690a16d810a1403cac8301d1225487d496bbca343d4ea57f0e759a20ccbbb45 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.255767 4710 scope.go:117] "RemoveContainer" containerID="ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.256246 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771"} err="failed to get container status \"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771\": rpc error: code = NotFound desc = could not find container \"ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771\": container with ID starting with ab5e922809316cd3112c1161e1423187736dd947cee25c6000fb681d70b72771 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.256270 4710 scope.go:117] "RemoveContainer" containerID="9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263459 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263503 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263556 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263586 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263605 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263645 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263665 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-logs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263689 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263746 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvbwx\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-kube-api-access-lvbwx\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263805 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263815 4710 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.263825 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.274948 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7480b380-bd58-49cc-90f3-ceb592465c3c" 
(UID: "7480b380-bd58-49cc-90f3-ceb592465c3c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.300699 4710 scope.go:117] "RemoveContainer" containerID="1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.339882 4710 scope.go:117] "RemoveContainer" containerID="9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.340192 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3\": container with ID starting with 9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3 not found: ID does not exist" containerID="9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340225 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3"} err="failed to get container status \"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3\": rpc error: code = NotFound desc = could not find container \"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3\": container with ID starting with 9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340245 4710 scope.go:117] "RemoveContainer" containerID="1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" Oct 09 09:51:06 crc kubenswrapper[4710]: E1009 09:51:06.340666 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5\": container with ID starting with 1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5 not found: ID does not exist" containerID="1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340682 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5"} err="failed to get container status \"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5\": rpc error: code = NotFound desc = could not find container \"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5\": container with ID starting with 1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340695 4710 scope.go:117] "RemoveContainer" containerID="9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340906 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3"} err="failed to get container status \"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3\": rpc error: code = NotFound desc = could not find container \"9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3\": container with ID starting with 9de14e1572c043c91317dc72410aa0a38b96e58c556d24cc85d3e151de4c8bd3 not found: ID does 
not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.340930 4710 scope.go:117] "RemoveContainer" containerID="1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.341146 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5"} err="failed to get container status \"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5\": rpc error: code = NotFound desc = could not find container \"1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5\": container with ID starting with 1638be628c4e65ed93562fefa6b9b576dc7ecb9d3528b90e8554d18c096bfed5 not found: ID does not exist" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.371570 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.371808 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.371946 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372036 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372111 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372214 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372228 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372363 4710 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-logs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372452 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.372596 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvbwx\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-kube-api-access-lvbwx\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.373580 4710 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7480b380-bd58-49cc-90f3-ceb592465c3c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.374252 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.377947 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.381052 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.381450 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-logs\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.392572 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.412667 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.418635 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvbwx\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-kube-api-access-lvbwx\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.422088 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/48c52c4c-a94f-4044-bbbe-9c8e935f1a9c-ceph\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.486496 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-internal-api-0\" (UID: \"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c\") " pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.500543 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.538749 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.547071 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.558460 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.562386 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.566105 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.566305 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.574616 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.581567 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-logs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.581696 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-config-data\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.581796 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-scripts\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.581881 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.582001 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-ceph\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.582121 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.582204 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mzt7\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-kube-api-access-8mzt7\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.582304 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.582380 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684333 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-config-data\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684388 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-scripts\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684438 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684524 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-ceph\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684550 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684583 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mzt7\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-kube-api-access-8mzt7\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684662 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684703 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.684768 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-logs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.685002 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.689336 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-config-data\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.689357 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-ceph\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.690935 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.691080 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/353d2111-74ef-4b2e-b17d-5e0672f1a33d-logs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.699197 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.701083 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.701543 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/353d2111-74ef-4b2e-b17d-5e0672f1a33d-scripts\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") 
" pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.720641 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mzt7\" (UniqueName: \"kubernetes.io/projected/353d2111-74ef-4b2e-b17d-5e0672f1a33d-kube-api-access-8mzt7\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.744416 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"353d2111-74ef-4b2e-b17d-5e0672f1a33d\") " pod="openstack/glance-default-external-api-0" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.846886 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172" path="/var/lib/kubelet/pods/1e6c3490-3e7a-4eb2-95a1-6a6dd5e49172/volumes" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.847958 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7480b380-bd58-49cc-90f3-ceb592465c3c" path="/var/lib/kubelet/pods/7480b380-bd58-49cc-90f3-ceb592465c3c/volumes" Oct 09 09:51:06 crc kubenswrapper[4710]: I1009 09:51:06.900063 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 09 09:51:07 crc kubenswrapper[4710]: I1009 09:51:07.356808 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 09 09:51:07 crc kubenswrapper[4710]: I1009 09:51:07.503348 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:07 crc kubenswrapper[4710]: I1009 09:51:07.503396 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:07 crc kubenswrapper[4710]: I1009 09:51:07.591937 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 09 09:51:08 crc kubenswrapper[4710]: I1009 09:51:08.178453 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c","Type":"ContainerStarted","Data":"2806ca57371b6065102451e263e95e8c1330916f75444de76d20f130f6b852c9"} Oct 09 09:51:08 crc kubenswrapper[4710]: I1009 09:51:08.184595 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"353d2111-74ef-4b2e-b17d-5e0672f1a33d","Type":"ContainerStarted","Data":"a5ed0d3055982ad4777e47658a03142270ff9db4a83c6e84bafdb5d9598a6cd1"} Oct 09 09:51:08 crc kubenswrapper[4710]: I1009 09:51:08.586145 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2qs8n" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" probeResult="failure" output=< Oct 09 09:51:08 crc kubenswrapper[4710]: timeout: failed to connect service ":50051" within 1s Oct 09 09:51:08 crc kubenswrapper[4710]: > Oct 09 09:51:09 crc kubenswrapper[4710]: I1009 09:51:09.197179 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c","Type":"ContainerStarted","Data":"e45020cfa0effb23440d13029453e78696ea9bd7b0ddaa4758379e91aebba87d"} Oct 09 09:51:09 crc kubenswrapper[4710]: I1009 09:51:09.197743 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"48c52c4c-a94f-4044-bbbe-9c8e935f1a9c","Type":"ContainerStarted","Data":"fc37bbe252b520e59aa9ba613592c7e70921c31b12e184b3312d6df640a581bd"} Oct 09 09:51:09 crc kubenswrapper[4710]: I1009 09:51:09.203991 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"353d2111-74ef-4b2e-b17d-5e0672f1a33d","Type":"ContainerStarted","Data":"a6af297e98d266856d9969d03fc777765124681dfa362b2f2fbea978e7b8ae8e"} Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.219682 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"353d2111-74ef-4b2e-b17d-5e0672f1a33d","Type":"ContainerStarted","Data":"3d57d3f4f7dfcc71e21df4d83defea80569cd4bf4ad4eec3d74d0d13a7a0bc5a"} Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.248261 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.248246603 podStartE2EDuration="4.248246603s" podCreationTimestamp="2025-10-09 09:51:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:10.235411453 +0000 UTC m=+2793.725519850" watchObservedRunningTime="2025-10-09 09:51:10.248246603 +0000 UTC m=+2793.738355000" Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.268524 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.268510656 podStartE2EDuration="4.268510656s" podCreationTimestamp="2025-10-09 09:51:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:10.2601096 +0000 UTC m=+2793.750218017" watchObservedRunningTime="2025-10-09 09:51:10.268510656 +0000 UTC m=+2793.758619053" Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.923774 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.973003 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-aa4d-account-create-xzmtz"] Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.974457 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.977593 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-aa4d-account-create-xzmtz"] Oct 09 09:51:10 crc kubenswrapper[4710]: I1009 09:51:10.980947 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.057876 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.108769 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4r6z\" (UniqueName: \"kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z\") pod \"manila-aa4d-account-create-xzmtz\" (UID: \"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c\") " pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.212097 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4r6z\" (UniqueName: \"kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z\") pod \"manila-aa4d-account-create-xzmtz\" (UID: \"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c\") " pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.231485 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4r6z\" (UniqueName: \"kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z\") pod \"manila-aa4d-account-create-xzmtz\" (UID: \"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c\") " pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.312068 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:11 crc kubenswrapper[4710]: I1009 09:51:11.782945 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-aa4d-account-create-xzmtz"] Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.293073 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67d94d7dc8-fvmp7" event={"ID":"13eb4841-8d3a-4ef6-a2da-656bab482ab4","Type":"ContainerStarted","Data":"24070591538ffa6a2ea3681a52b8133faad620ad66ce0bca9ca638e2186dad2e"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.297515 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerStarted","Data":"ebbf23a6846675dfe200768beecc4f87011af3f0cd2254504b705224a5524ccf"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.297591 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerStarted","Data":"454451ac6714dbe06258d42d89e9890f2d048b42d4188e0a6bd30fc1862de852"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.297834 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-598b695687-fxzw5" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon-log" containerID="cri-o://454451ac6714dbe06258d42d89e9890f2d048b42d4188e0a6bd30fc1862de852" gracePeriod=30 Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.298751 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-598b695687-fxzw5" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon" containerID="cri-o://ebbf23a6846675dfe200768beecc4f87011af3f0cd2254504b705224a5524ccf" gracePeriod=30 Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.304554 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerStarted","Data":"d53c16c67c5b0020f557c562195ccfa873e3b2e20bbf3ea62771bea097dc7b4a"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.318095 4710 generic.go:334] "Generic (PLEG): container finished" podID="9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" containerID="e8643c6d2ce0c98257dcfc4d9d3c532380f31b11d903756876aca4082f885ecd" exitCode=0 Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.318156 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-aa4d-account-create-xzmtz" event={"ID":"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c","Type":"ContainerDied","Data":"e8643c6d2ce0c98257dcfc4d9d3c532380f31b11d903756876aca4082f885ecd"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.318179 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-aa4d-account-create-xzmtz" event={"ID":"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c","Type":"ContainerStarted","Data":"473ffb57cec2b5070ba2a26550a1bcc0cca0b383b52b4f8efaac7ad35b5632c1"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.326908 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-598b695687-fxzw5" podStartSLOduration=2.096288973 podStartE2EDuration="15.326898167s" podCreationTimestamp="2025-10-09 09:51:01 +0000 UTC" firstStartedPulling="2025-10-09 09:51:02.513502751 +0000 UTC m=+2786.003611147" lastFinishedPulling="2025-10-09 09:51:15.744111945 +0000 UTC m=+2799.234220341" 
observedRunningTime="2025-10-09 09:51:16.323691978 +0000 UTC m=+2799.813800376" watchObservedRunningTime="2025-10-09 09:51:16.326898167 +0000 UTC m=+2799.817006564" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.329928 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerStarted","Data":"8c457c956c70edbd7efe99b90e4dd821617879f24090251a2db3e16d2faf751d"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.329959 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerStarted","Data":"709a0e716efd3a9a42ebb0fc33cc847a7fb581fe785592d69d215556d9df5058"} Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.330076 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55df4dcbd5-bmlgb" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon-log" containerID="cri-o://709a0e716efd3a9a42ebb0fc33cc847a7fb581fe785592d69d215556d9df5058" gracePeriod=30 Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.330303 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-55df4dcbd5-bmlgb" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon" containerID="cri-o://8c457c956c70edbd7efe99b90e4dd821617879f24090251a2db3e16d2faf751d" gracePeriod=30 Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.380353 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-55df4dcbd5-bmlgb" podStartSLOduration=2.526888849 podStartE2EDuration="16.380337708s" podCreationTimestamp="2025-10-09 09:51:00 +0000 UTC" firstStartedPulling="2025-10-09 09:51:01.965525066 +0000 UTC m=+2785.455633463" lastFinishedPulling="2025-10-09 09:51:15.818973925 +0000 UTC m=+2799.309082322" observedRunningTime="2025-10-09 09:51:16.37363776 +0000 UTC m=+2799.863746157" watchObservedRunningTime="2025-10-09 09:51:16.380337708 +0000 UTC m=+2799.870446105" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.548199 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.548244 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.576881 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.584164 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.900781 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.900850 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.937728 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 09 09:51:16 crc kubenswrapper[4710]: I1009 09:51:16.940492 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-external-api-0" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.340529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerStarted","Data":"07262d3590ed932bf374182e51c7e52eb519a990dfa2d33d6f54d6053d4c7dc2"} Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.342450 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67d94d7dc8-fvmp7" event={"ID":"13eb4841-8d3a-4ef6-a2da-656bab482ab4","Type":"ContainerStarted","Data":"520ba8b51e55b1744f306629a67bb524054a9b15ca9617c5f9beb0488339f5ea"} Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.343102 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.343192 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.343255 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.344376 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.366503 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-dd45d474-nc99p" podStartSLOduration=3.762354777 podStartE2EDuration="14.366492424s" podCreationTimestamp="2025-10-09 09:51:03 +0000 UTC" firstStartedPulling="2025-10-09 09:51:05.189324304 +0000 UTC m=+2788.679432701" lastFinishedPulling="2025-10-09 09:51:15.793461952 +0000 UTC m=+2799.283570348" observedRunningTime="2025-10-09 09:51:17.365655094 +0000 UTC m=+2800.855763481" watchObservedRunningTime="2025-10-09 09:51:17.366492424 +0000 UTC m=+2800.856600811" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.898316 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.918395 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-67d94d7dc8-fvmp7" podStartSLOduration=4.545587133 podStartE2EDuration="14.918377217s" podCreationTimestamp="2025-10-09 09:51:03 +0000 UTC" firstStartedPulling="2025-10-09 09:51:05.389206456 +0000 UTC m=+2788.879314853" lastFinishedPulling="2025-10-09 09:51:15.761996539 +0000 UTC m=+2799.252104937" observedRunningTime="2025-10-09 09:51:17.388443208 +0000 UTC m=+2800.878551606" watchObservedRunningTime="2025-10-09 09:51:17.918377217 +0000 UTC m=+2801.408485615" Oct 09 09:51:17 crc kubenswrapper[4710]: I1009 09:51:17.996489 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4r6z\" (UniqueName: \"kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z\") pod \"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c\" (UID: \"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c\") " Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.013912 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z" (OuterVolumeSpecName: "kube-api-access-w4r6z") pod "9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" (UID: "9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c"). 
InnerVolumeSpecName "kube-api-access-w4r6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.101009 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4r6z\" (UniqueName: \"kubernetes.io/projected/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c-kube-api-access-w4r6z\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.355271 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-aa4d-account-create-xzmtz" Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.357561 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-aa4d-account-create-xzmtz" event={"ID":"9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c","Type":"ContainerDied","Data":"473ffb57cec2b5070ba2a26550a1bcc0cca0b383b52b4f8efaac7ad35b5632c1"} Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.357622 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="473ffb57cec2b5070ba2a26550a1bcc0cca0b383b52b4f8efaac7ad35b5632c1" Oct 09 09:51:18 crc kubenswrapper[4710]: I1009 09:51:18.548544 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2qs8n" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" probeResult="failure" output=< Oct 09 09:51:18 crc kubenswrapper[4710]: timeout: failed to connect service ":50051" within 1s Oct 09 09:51:18 crc kubenswrapper[4710]: > Oct 09 09:51:19 crc kubenswrapper[4710]: I1009 09:51:19.364267 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:51:19 crc kubenswrapper[4710]: I1009 09:51:19.364763 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:51:19 crc kubenswrapper[4710]: I1009 09:51:19.364313 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:51:19 crc kubenswrapper[4710]: I1009 09:51:19.364898 4710 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 09 09:51:20 crc kubenswrapper[4710]: I1009 09:51:20.161934 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 09:51:20 crc kubenswrapper[4710]: I1009 09:51:20.165175 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 09 09:51:20 crc kubenswrapper[4710]: I1009 09:51:20.187044 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:20 crc kubenswrapper[4710]: I1009 09:51:20.189482 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.342004 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.472327 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-qdvtf"] Oct 09 09:51:21 crc kubenswrapper[4710]: E1009 09:51:21.472943 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" containerName="mariadb-account-create" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.473025 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" 
containerName="mariadb-account-create" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.473268 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" containerName="mariadb-account-create" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.474010 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.478826 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.478826 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-9bhwl" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.491834 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-qdvtf"] Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.601320 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.601540 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.601650 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.601744 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htffv\" (UniqueName: \"kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.703482 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.703785 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.703828 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data\") pod \"manila-db-sync-qdvtf\" (UID: 
\"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.704394 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htffv\" (UniqueName: \"kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.712942 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.713026 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.723940 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htffv\" (UniqueName: \"kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.748185 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data\") pod \"manila-db-sync-qdvtf\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.797078 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:21 crc kubenswrapper[4710]: I1009 09:51:21.799150 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:22 crc kubenswrapper[4710]: I1009 09:51:22.450664 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-qdvtf"] Oct 09 09:51:23 crc kubenswrapper[4710]: I1009 09:51:23.397247 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qdvtf" event={"ID":"4cdc5804-ffca-4474-a386-1f803b4a1c23","Type":"ContainerStarted","Data":"ffd88338728ec340ee1210fbca579dcb576153e32e6cadfd503160a389cd7ed5"} Oct 09 09:51:24 crc kubenswrapper[4710]: I1009 09:51:24.278416 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:24 crc kubenswrapper[4710]: I1009 09:51:24.278505 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:24 crc kubenswrapper[4710]: I1009 09:51:24.338921 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:24 crc kubenswrapper[4710]: I1009 09:51:24.338979 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:27 crc kubenswrapper[4710]: I1009 09:51:27.565647 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:27 crc kubenswrapper[4710]: I1009 09:51:27.636538 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:28 crc kubenswrapper[4710]: I1009 09:51:28.391145 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:51:29 crc kubenswrapper[4710]: I1009 09:51:29.457554 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qdvtf" event={"ID":"4cdc5804-ffca-4474-a386-1f803b4a1c23","Type":"ContainerStarted","Data":"5cdee0ea65674be2a87b16dbbd851745808c152f848a4f445fd99095cf5a10a1"} Oct 09 09:51:29 crc kubenswrapper[4710]: I1009 09:51:29.457846 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2qs8n" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" containerID="cri-o://7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd" gracePeriod=2 Oct 09 09:51:29 crc kubenswrapper[4710]: I1009 09:51:29.488044 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-qdvtf" podStartSLOduration=2.165712033 podStartE2EDuration="8.488022834s" podCreationTimestamp="2025-10-09 09:51:21 +0000 UTC" firstStartedPulling="2025-10-09 09:51:22.459822985 +0000 UTC m=+2805.949931382" lastFinishedPulling="2025-10-09 09:51:28.782133776 +0000 UTC m=+2812.272242183" observedRunningTime="2025-10-09 09:51:29.482390501 +0000 UTC m=+2812.972498897" watchObservedRunningTime="2025-10-09 09:51:29.488022834 +0000 UTC m=+2812.978131231" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.260677 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.332325 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content\") pod \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.332470 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities\") pod \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.332714 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w6kn\" (UniqueName: \"kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn\") pod \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\" (UID: \"a7aa8b50-9d7d-4735-8e06-0931decf7a95\") " Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.333856 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities" (OuterVolumeSpecName: "utilities") pod "a7aa8b50-9d7d-4735-8e06-0931decf7a95" (UID: "a7aa8b50-9d7d-4735-8e06-0931decf7a95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.358353 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn" (OuterVolumeSpecName: "kube-api-access-6w6kn") pod "a7aa8b50-9d7d-4735-8e06-0931decf7a95" (UID: "a7aa8b50-9d7d-4735-8e06-0931decf7a95"). InnerVolumeSpecName "kube-api-access-6w6kn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.393253 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7aa8b50-9d7d-4735-8e06-0931decf7a95" (UID: "a7aa8b50-9d7d-4735-8e06-0931decf7a95"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.436243 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.436278 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w6kn\" (UniqueName: \"kubernetes.io/projected/a7aa8b50-9d7d-4735-8e06-0931decf7a95-kube-api-access-6w6kn\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.436292 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7aa8b50-9d7d-4735-8e06-0931decf7a95-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.470424 4710 generic.go:334] "Generic (PLEG): container finished" podID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerID="7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd" exitCode=0 Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.470563 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qs8n" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.470630 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerDied","Data":"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd"} Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.470691 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qs8n" event={"ID":"a7aa8b50-9d7d-4735-8e06-0931decf7a95","Type":"ContainerDied","Data":"389b6f1051b0af26b6c23671c31e31f8090fb9be90df73f14664c2f4e8305fbf"} Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.470715 4710 scope.go:117] "RemoveContainer" containerID="7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.526235 4710 scope.go:117] "RemoveContainer" containerID="3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.542363 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.552317 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2qs8n"] Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.583557 4710 scope.go:117] "RemoveContainer" containerID="aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.608786 4710 scope.go:117] "RemoveContainer" containerID="7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd" Oct 09 09:51:30 crc kubenswrapper[4710]: E1009 09:51:30.609152 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd\": container with ID starting with 7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd not found: ID does not exist" containerID="7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.609180 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd"} err="failed to get container status \"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd\": rpc error: code = NotFound desc = could not find container \"7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd\": container with ID starting with 7b9382177ae66a914ac271068f1d130beceb2f316659ef3a48c42e8a111086bd not found: ID does not exist" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.609205 4710 scope.go:117] "RemoveContainer" containerID="3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf" Oct 09 09:51:30 crc kubenswrapper[4710]: E1009 09:51:30.609627 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf\": container with ID starting with 3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf not found: ID does not exist" containerID="3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.609648 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf"} err="failed to get container status \"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf\": rpc error: code = NotFound desc = could not find container \"3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf\": container with ID starting with 3317b7afa5586ed3e1c71bc59b85047228609461190c0eab0f72490556b88baf not found: ID does not exist" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.609662 4710 scope.go:117] "RemoveContainer" containerID="aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202" Oct 09 09:51:30 crc kubenswrapper[4710]: E1009 09:51:30.609990 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202\": container with ID starting with aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202 not found: ID does not exist" containerID="aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.610011 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202"} err="failed to get container status \"aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202\": rpc error: code = NotFound desc = could not find container \"aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202\": container with ID starting with aeac23ad6705e7c0ce818d87025a5b16130bec7bd30cd7966981d6e84cf74202 not found: ID does not exist" Oct 09 09:51:30 crc kubenswrapper[4710]: I1009 09:51:30.825942 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" path="/var/lib/kubelet/pods/a7aa8b50-9d7d-4735-8e06-0931decf7a95/volumes" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.002871 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:31 crc kubenswrapper[4710]: E1009 09:51:31.003385 4710 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="extract-utilities" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.003413 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="extract-utilities" Oct 09 09:51:31 crc kubenswrapper[4710]: E1009 09:51:31.003457 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.003465 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" Oct 09 09:51:31 crc kubenswrapper[4710]: E1009 09:51:31.003510 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="extract-content" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.003516 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="extract-content" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.003772 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7aa8b50-9d7d-4735-8e06-0931decf7a95" containerName="registry-server" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.009044 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.036978 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.160158 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqkts\" (UniqueName: \"kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.160287 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.160321 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.262079 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.262145 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities\") pod 
\"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.262211 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqkts\" (UniqueName: \"kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.263140 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.263440 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.281972 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqkts\" (UniqueName: \"kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts\") pod \"certified-operators-2wqpd\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.329197 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:31 crc kubenswrapper[4710]: I1009 09:51:31.825325 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:32 crc kubenswrapper[4710]: I1009 09:51:32.532312 4710 generic.go:334] "Generic (PLEG): container finished" podID="e4ee793c-672c-48e9-ae90-5741b230c701" containerID="46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d" exitCode=0 Oct 09 09:51:32 crc kubenswrapper[4710]: I1009 09:51:32.532418 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerDied","Data":"46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d"} Oct 09 09:51:32 crc kubenswrapper[4710]: I1009 09:51:32.532761 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerStarted","Data":"7c4d2120ee9a5c591fb4bf4cca949cc1c1f4f26d261a89aab61db7fb8b5fa3df"} Oct 09 09:51:33 crc kubenswrapper[4710]: I1009 09:51:33.545473 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerStarted","Data":"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf"} Oct 09 09:51:34 crc kubenswrapper[4710]: I1009 09:51:34.282542 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.242:8443: connect: connection refused" Oct 09 09:51:34 crc kubenswrapper[4710]: I1009 09:51:34.341047 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-67d94d7dc8-fvmp7" podUID="13eb4841-8d3a-4ef6-a2da-656bab482ab4" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.243:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.243:8443: connect: connection refused" Oct 09 09:51:34 crc kubenswrapper[4710]: I1009 09:51:34.554197 4710 generic.go:334] "Generic (PLEG): container finished" podID="e4ee793c-672c-48e9-ae90-5741b230c701" containerID="d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf" exitCode=0 Oct 09 09:51:34 crc kubenswrapper[4710]: I1009 09:51:34.554259 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerDied","Data":"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf"} Oct 09 09:51:35 crc kubenswrapper[4710]: I1009 09:51:35.565851 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerStarted","Data":"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f"} Oct 09 09:51:35 crc kubenswrapper[4710]: I1009 09:51:35.568456 4710 generic.go:334] "Generic (PLEG): container finished" podID="4cdc5804-ffca-4474-a386-1f803b4a1c23" containerID="5cdee0ea65674be2a87b16dbbd851745808c152f848a4f445fd99095cf5a10a1" exitCode=0 Oct 09 09:51:35 crc kubenswrapper[4710]: I1009 09:51:35.568514 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/manila-db-sync-qdvtf" event={"ID":"4cdc5804-ffca-4474-a386-1f803b4a1c23","Type":"ContainerDied","Data":"5cdee0ea65674be2a87b16dbbd851745808c152f848a4f445fd99095cf5a10a1"} Oct 09 09:51:35 crc kubenswrapper[4710]: I1009 09:51:35.584763 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2wqpd" podStartSLOduration=3.071951985 podStartE2EDuration="5.584749312s" podCreationTimestamp="2025-10-09 09:51:30 +0000 UTC" firstStartedPulling="2025-10-09 09:51:32.534846359 +0000 UTC m=+2816.024954756" lastFinishedPulling="2025-10-09 09:51:35.047643686 +0000 UTC m=+2818.537752083" observedRunningTime="2025-10-09 09:51:35.581608809 +0000 UTC m=+2819.071717206" watchObservedRunningTime="2025-10-09 09:51:35.584749312 +0000 UTC m=+2819.074857709" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.137161 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.257660 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data\") pod \"4cdc5804-ffca-4474-a386-1f803b4a1c23\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.257720 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data\") pod \"4cdc5804-ffca-4474-a386-1f803b4a1c23\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.257801 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htffv\" (UniqueName: \"kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv\") pod \"4cdc5804-ffca-4474-a386-1f803b4a1c23\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.257974 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle\") pod \"4cdc5804-ffca-4474-a386-1f803b4a1c23\" (UID: \"4cdc5804-ffca-4474-a386-1f803b4a1c23\") " Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.265604 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "4cdc5804-ffca-4474-a386-1f803b4a1c23" (UID: "4cdc5804-ffca-4474-a386-1f803b4a1c23"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.286223 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv" (OuterVolumeSpecName: "kube-api-access-htffv") pod "4cdc5804-ffca-4474-a386-1f803b4a1c23" (UID: "4cdc5804-ffca-4474-a386-1f803b4a1c23"). InnerVolumeSpecName "kube-api-access-htffv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.287149 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data" (OuterVolumeSpecName: "config-data") pod "4cdc5804-ffca-4474-a386-1f803b4a1c23" (UID: "4cdc5804-ffca-4474-a386-1f803b4a1c23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.338614 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cdc5804-ffca-4474-a386-1f803b4a1c23" (UID: "4cdc5804-ffca-4474-a386-1f803b4a1c23"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.360069 4710 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-job-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.360093 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.360102 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htffv\" (UniqueName: \"kubernetes.io/projected/4cdc5804-ffca-4474-a386-1f803b4a1c23-kube-api-access-htffv\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.360112 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdc5804-ffca-4474-a386-1f803b4a1c23-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.618970 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-qdvtf" event={"ID":"4cdc5804-ffca-4474-a386-1f803b4a1c23","Type":"ContainerDied","Data":"ffd88338728ec340ee1210fbca579dcb576153e32e6cadfd503160a389cd7ed5"} Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.619027 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ffd88338728ec340ee1210fbca579dcb576153e32e6cadfd503160a389cd7ed5" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.619116 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-qdvtf" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.955942 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:51:37 crc kubenswrapper[4710]: E1009 09:51:37.956354 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cdc5804-ffca-4474-a386-1f803b4a1c23" containerName="manila-db-sync" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.956368 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cdc5804-ffca-4474-a386-1f803b4a1c23" containerName="manila-db-sync" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.956547 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cdc5804-ffca-4474-a386-1f803b4a1c23" containerName="manila-db-sync" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.957528 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.969196 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.969395 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.969618 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-9bhwl" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.976063 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.977350 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.999375 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Oct 09 09:51:37 crc kubenswrapper[4710]: I1009 09:51:37.999922 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.004802 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.021581 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.079989 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080063 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080088 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts\") pod \"manila-scheduler-0\" (UID: 
\"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080119 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb8bm\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080150 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080184 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080208 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080254 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080285 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj8dp\" (UniqueName: \"kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080305 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080330 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080346 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: 
\"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080363 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.080382 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.135508 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7887c4559f-7d5cl"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.137220 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.163537 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7887c4559f-7d5cl"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.186959 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb8bm\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187365 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187455 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187481 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187678 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187745 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj8dp\" (UniqueName: \"kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp\") pod \"manila-scheduler-0\" 
(UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187769 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187790 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187844 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187889 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187915 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.187986 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.188059 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.188084 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.188133 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.188890 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.189757 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.199348 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.199753 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.215448 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.217100 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.219964 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.220634 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.220741 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.229126 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.231670 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.232922 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb8bm\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm\") pod \"manila-share-share1-0\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.257408 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj8dp\" (UniqueName: \"kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp\") pod \"manila-scheduler-0\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291275 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291355 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t5xj\" (UniqueName: \"kubernetes.io/projected/892059c2-6b0b-48d9-ba51-fb86b0856c4a-kube-api-access-4t5xj\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291483 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291609 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291633 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-dns-svc\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.291779 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-config\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.297113 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/manila-api-0"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.298591 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.299814 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.302122 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.310954 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.343799 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.393761 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-config\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.393889 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.393940 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wcv4\" (UniqueName: \"kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.393965 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t5xj\" (UniqueName: \"kubernetes.io/projected/892059c2-6b0b-48d9-ba51-fb86b0856c4a-kube-api-access-4t5xj\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394019 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394074 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394119 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " 
pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394155 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394181 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394195 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394228 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394257 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-dns-svc\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.394311 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.395215 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-config\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.396478 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.396519 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.396643 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.397046 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/892059c2-6b0b-48d9-ba51-fb86b0856c4a-dns-svc\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.431239 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t5xj\" (UniqueName: \"kubernetes.io/projected/892059c2-6b0b-48d9-ba51-fb86b0856c4a-kube-api-access-4t5xj\") pod \"dnsmasq-dns-7887c4559f-7d5cl\" (UID: \"892059c2-6b0b-48d9-ba51-fb86b0856c4a\") " pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.480250 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502115 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502362 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502382 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502464 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502556 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wcv4\" (UniqueName: \"kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502604 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.502641 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle\") pod 
\"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.509285 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.511550 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.514356 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.533523 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.537063 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.541994 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wcv4\" (UniqueName: \"kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.542099 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts\") pod \"manila-api-0\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.686013 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:38 crc kubenswrapper[4710]: I1009 09:51:38.973180 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.179966 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.254221 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7887c4559f-7d5cl"] Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.486235 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.693604 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerStarted","Data":"554cda6b20b3e792e527da872b9e6eab2559e38339b97c1f3eadca3cccd0067e"} Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.695529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerStarted","Data":"967eb772fcb7d5b303d6a045adc43f380654dda07cc49dba2a542a6c9d45a55f"} Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.696850 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerStarted","Data":"b87a7e7d867ce262f0ce13b4bf518bd06c6400b1757b67bc29216f1a077fe40a"} Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.703538 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" event={"ID":"892059c2-6b0b-48d9-ba51-fb86b0856c4a","Type":"ContainerStarted","Data":"975072d9e8c6bea41eae2f454818aaedf954980b91a9e01df9719fb6bbe151bf"} Oct 09 09:51:39 crc kubenswrapper[4710]: I1009 09:51:39.703588 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" event={"ID":"892059c2-6b0b-48d9-ba51-fb86b0856c4a","Type":"ContainerStarted","Data":"ce63b258ba77058864294aa56f5736a4f69ccd62b41e0db9a5d65c302351dcc6"} Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.722462 4710 generic.go:334] "Generic (PLEG): container finished" podID="892059c2-6b0b-48d9-ba51-fb86b0856c4a" containerID="975072d9e8c6bea41eae2f454818aaedf954980b91a9e01df9719fb6bbe151bf" exitCode=0 Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.722541 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" event={"ID":"892059c2-6b0b-48d9-ba51-fb86b0856c4a","Type":"ContainerDied","Data":"975072d9e8c6bea41eae2f454818aaedf954980b91a9e01df9719fb6bbe151bf"} Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.731927 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerStarted","Data":"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757"} Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.731962 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerStarted","Data":"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f"} Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.732329 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 09 
09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.792068 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=2.79204884 podStartE2EDuration="2.79204884s" podCreationTimestamp="2025-10-09 09:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:40.785787289 +0000 UTC m=+2824.275895686" watchObservedRunningTime="2025-10-09 09:51:40.79204884 +0000 UTC m=+2824.282157237" Oct 09 09:51:40 crc kubenswrapper[4710]: I1009 09:51:40.845661 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.329919 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.329969 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.415734 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.746349 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerStarted","Data":"8c3466e6a499eed150beda0266822efc812c4011e237d49cc10bf95609c36c1b"} Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.746710 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerStarted","Data":"4baefd534c0781d8369b767c8a18d1b5e8dd219aab9d69b6be3517aa15a8133a"} Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.754358 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" event={"ID":"892059c2-6b0b-48d9-ba51-fb86b0856c4a","Type":"ContainerStarted","Data":"c7950bdf081c178855be6628ef6bbd22ac7f4e33824a09724bb9447c50fd9a92"} Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.755710 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.768659 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=3.527869005 podStartE2EDuration="4.768646789s" podCreationTimestamp="2025-10-09 09:51:37 +0000 UTC" firstStartedPulling="2025-10-09 09:51:38.981636361 +0000 UTC m=+2822.471744758" lastFinishedPulling="2025-10-09 09:51:40.222414145 +0000 UTC m=+2823.712522542" observedRunningTime="2025-10-09 09:51:41.766924941 +0000 UTC m=+2825.257033337" watchObservedRunningTime="2025-10-09 09:51:41.768646789 +0000 UTC m=+2825.258755186" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.795386 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" podStartSLOduration=3.795374655 podStartE2EDuration="3.795374655s" podCreationTimestamp="2025-10-09 09:51:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:41.786539729 +0000 UTC m=+2825.276648127" watchObservedRunningTime="2025-10-09 09:51:41.795374655 +0000 UTC m=+2825.285483052" Oct 
09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.817076 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:41 crc kubenswrapper[4710]: I1009 09:51:41.914593 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:42 crc kubenswrapper[4710]: I1009 09:51:42.762976 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api-log" containerID="cri-o://ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" gracePeriod=30 Oct 09 09:51:42 crc kubenswrapper[4710]: I1009 09:51:42.763780 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-api-0" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api" containerID="cri-o://3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" gracePeriod=30 Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.748177 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.779608 4710 generic.go:334] "Generic (PLEG): container finished" podID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerID="3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" exitCode=0 Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.779644 4710 generic.go:334] "Generic (PLEG): container finished" podID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerID="ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" exitCode=143 Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.779872 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2wqpd" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="registry-server" containerID="cri-o://758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f" gracePeriod=2 Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.779981 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.780612 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerDied","Data":"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757"} Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.780639 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerDied","Data":"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f"} Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.780660 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"841dcb8a-a7a6-4d7e-9558-c8708209cd33","Type":"ContainerDied","Data":"554cda6b20b3e792e527da872b9e6eab2559e38339b97c1f3eadca3cccd0067e"} Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.780676 4710 scope.go:117] "RemoveContainer" containerID="3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.848962 4710 scope.go:117] "RemoveContainer" containerID="ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.865947 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866035 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866086 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866333 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866420 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wcv4\" (UniqueName: \"kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866458 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866486 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id\") pod \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\" (UID: \"841dcb8a-a7a6-4d7e-9558-c8708209cd33\") " Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866979 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.866990 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs" (OuterVolumeSpecName: "logs") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.872836 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts" (OuterVolumeSpecName: "scripts") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.878586 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4" (OuterVolumeSpecName: "kube-api-access-4wcv4") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "kube-api-access-4wcv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.879872 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.897036 4710 scope.go:117] "RemoveContainer" containerID="3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" Oct 09 09:51:43 crc kubenswrapper[4710]: E1009 09:51:43.900081 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757\": container with ID starting with 3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757 not found: ID does not exist" containerID="3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.900118 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757"} err="failed to get container status \"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757\": rpc error: code = NotFound desc = could not find container \"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757\": container with ID starting with 3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757 not found: ID does not exist" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.900145 4710 scope.go:117] "RemoveContainer" containerID="ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" Oct 09 09:51:43 crc kubenswrapper[4710]: E1009 09:51:43.902079 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f\": container with ID starting with ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f not found: ID does not exist" containerID="ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.902113 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f"} err="failed to get container status \"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f\": rpc error: code = NotFound desc = could not find container \"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f\": container with ID starting with ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f not found: ID does not exist" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.902138 4710 scope.go:117] "RemoveContainer" containerID="3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.902566 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757"} err="failed to get container status \"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757\": rpc error: code = NotFound desc = could not find container \"3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757\": container with ID starting with 3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757 not found: ID does not exist" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.902587 4710 scope.go:117] "RemoveContainer" containerID="ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.903553 4710 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f"} err="failed to get container status \"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f\": rpc error: code = NotFound desc = could not find container \"ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f\": container with ID starting with ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f not found: ID does not exist" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.907696 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.947573 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data" (OuterVolumeSpecName: "config-data") pod "841dcb8a-a7a6-4d7e-9558-c8708209cd33" (UID: "841dcb8a-a7a6-4d7e-9558-c8708209cd33"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968372 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968399 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968410 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968419 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/841dcb8a-a7a6-4d7e-9558-c8708209cd33-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968441 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wcv4\" (UniqueName: \"kubernetes.io/projected/841dcb8a-a7a6-4d7e-9558-c8708209cd33-kube-api-access-4wcv4\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968450 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/841dcb8a-a7a6-4d7e-9558-c8708209cd33-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:43 crc kubenswrapper[4710]: I1009 09:51:43.968459 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/841dcb8a-a7a6-4d7e-9558-c8708209cd33-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.178302 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.191951 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-api-0"] Oct 
09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.206501 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:44 crc kubenswrapper[4710]: E1009 09:51:44.207260 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.207285 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api" Oct 09 09:51:44 crc kubenswrapper[4710]: E1009 09:51:44.207495 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api-log" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.207512 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api-log" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.207731 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api-log" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.207747 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" containerName="manila-api" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.209131 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.214970 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-public-svc" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.215180 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-manila-internal-svc" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.215308 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.216382 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.379631 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data-custom\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.379954 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-scripts\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380237 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-public-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380271 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b107b875-5e01-45ce-8702-dcc78cd19193-logs\") pod \"manila-api-0\" (UID: 
\"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380341 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380401 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkq8k\" (UniqueName: \"kubernetes.io/projected/b107b875-5e01-45ce-8702-dcc78cd19193-kube-api-access-lkq8k\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380445 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380602 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-internal-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.380711 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b107b875-5e01-45ce-8702-dcc78cd19193-etc-machine-id\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.405960 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.483303 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqkts\" (UniqueName: \"kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts\") pod \"e4ee793c-672c-48e9-ae90-5741b230c701\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.483448 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content\") pod \"e4ee793c-672c-48e9-ae90-5741b230c701\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.483653 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities\") pod \"e4ee793c-672c-48e9-ae90-5741b230c701\" (UID: \"e4ee793c-672c-48e9-ae90-5741b230c701\") " Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484062 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data-custom\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484087 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-scripts\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484180 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-public-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484195 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b107b875-5e01-45ce-8702-dcc78cd19193-logs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484222 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484251 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484267 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkq8k\" (UniqueName: \"kubernetes.io/projected/b107b875-5e01-45ce-8702-dcc78cd19193-kube-api-access-lkq8k\") pod \"manila-api-0\" (UID: 
\"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484318 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-internal-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484355 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b107b875-5e01-45ce-8702-dcc78cd19193-etc-machine-id\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.484456 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b107b875-5e01-45ce-8702-dcc78cd19193-etc-machine-id\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.486360 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities" (OuterVolumeSpecName: "utilities") pod "e4ee793c-672c-48e9-ae90-5741b230c701" (UID: "e4ee793c-672c-48e9-ae90-5741b230c701"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.488042 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b107b875-5e01-45ce-8702-dcc78cd19193-logs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.501185 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.511970 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-scripts\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.513051 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-public-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.513414 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-internal-tls-certs\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.514068 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data\") pod \"manila-api-0\" (UID: 
\"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.543936 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkq8k\" (UniqueName: \"kubernetes.io/projected/b107b875-5e01-45ce-8702-dcc78cd19193-kube-api-access-lkq8k\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.544282 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts" (OuterVolumeSpecName: "kube-api-access-bqkts") pod "e4ee793c-672c-48e9-ae90-5741b230c701" (UID: "e4ee793c-672c-48e9-ae90-5741b230c701"). InnerVolumeSpecName "kube-api-access-bqkts". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.548633 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4ee793c-672c-48e9-ae90-5741b230c701" (UID: "e4ee793c-672c-48e9-ae90-5741b230c701"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.558624 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b107b875-5e01-45ce-8702-dcc78cd19193-config-data-custom\") pod \"manila-api-0\" (UID: \"b107b875-5e01-45ce-8702-dcc78cd19193\") " pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.586481 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.586883 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqkts\" (UniqueName: \"kubernetes.io/projected/e4ee793c-672c-48e9-ae90-5741b230c701-kube-api-access-bqkts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.587011 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4ee793c-672c-48e9-ae90-5741b230c701-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.813512 4710 generic.go:334] "Generic (PLEG): container finished" podID="e4ee793c-672c-48e9-ae90-5741b230c701" containerID="758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f" exitCode=0 Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.813610 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerDied","Data":"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f"} Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.813659 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2wqpd" event={"ID":"e4ee793c-672c-48e9-ae90-5741b230c701","Type":"ContainerDied","Data":"7c4d2120ee9a5c591fb4bf4cca949cc1c1f4f26d261a89aab61db7fb8b5fa3df"} Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.813680 4710 scope.go:117] "RemoveContainer" 
containerID="758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.813822 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2wqpd" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.840128 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.842663 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="841dcb8a-a7a6-4d7e-9558-c8708209cd33" path="/var/lib/kubelet/pods/841dcb8a-a7a6-4d7e-9558-c8708209cd33/volumes" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.879517 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.891687 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2wqpd"] Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.895647 4710 scope.go:117] "RemoveContainer" containerID="d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf" Oct 09 09:51:44 crc kubenswrapper[4710]: I1009 09:51:44.938173 4710 scope.go:117] "RemoveContainer" containerID="46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.019852 4710 scope.go:117] "RemoveContainer" containerID="758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f" Oct 09 09:51:45 crc kubenswrapper[4710]: E1009 09:51:45.052983 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f\": container with ID starting with 758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f not found: ID does not exist" containerID="758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.053022 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f"} err="failed to get container status \"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f\": rpc error: code = NotFound desc = could not find container \"758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f\": container with ID starting with 758cd9d445c61acd84c43c3fcbdacc002e1caf01e3ed5f5d4cc20f299d899b5f not found: ID does not exist" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.053067 4710 scope.go:117] "RemoveContainer" containerID="d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf" Oct 09 09:51:45 crc kubenswrapper[4710]: E1009 09:51:45.071744 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf\": container with ID starting with d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf not found: ID does not exist" containerID="d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.095716 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf"} err="failed to get container status 
\"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf\": rpc error: code = NotFound desc = could not find container \"d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf\": container with ID starting with d46ff2f8b59836dc938a26142e1733f817f184bc1e1491b68f10eb6aa4692bbf not found: ID does not exist" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.095940 4710 scope.go:117] "RemoveContainer" containerID="46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d" Oct 09 09:51:45 crc kubenswrapper[4710]: E1009 09:51:45.108876 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d\": container with ID starting with 46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d not found: ID does not exist" containerID="46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.108930 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d"} err="failed to get container status \"46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d\": rpc error: code = NotFound desc = could not find container \"46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d\": container with ID starting with 46b22da546b659c0267db38f3ca7b756fdad480c49d99b052dd41aead6ea3c4d not found: ID does not exist" Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.483826 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.867398 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"b107b875-5e01-45ce-8702-dcc78cd19193","Type":"ContainerStarted","Data":"6bb97c556c8275ad92f71ab4c340835e665fd409f49848c2c06e5bf3aac32900"} Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.923874 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.924507 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="sg-core" containerID="cri-o://790eafc42b4544f6036f7ca62edac0dc58b89a6318dd95ba82d5755143bfcec5" gracePeriod=30 Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.924559 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-notification-agent" containerID="cri-o://9193356d85269ddeb233452187c77556505b3e97645cc1c6b911a04df1eb8f90" gracePeriod=30 Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.924936 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="proxy-httpd" containerID="cri-o://cbfec75f5679b7e131e87c217ceb79f5091c1c68b8de8f71af2f81bc1b74eb55" gracePeriod=30 Oct 09 09:51:45 crc kubenswrapper[4710]: I1009 09:51:45.924322 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-central-agent" containerID="cri-o://8609a1b6561465621721783889e5843e5fa6e6aff66724c5e99f03c8c5c0bef5" 
gracePeriod=30 Oct 09 09:51:46 crc kubenswrapper[4710]: W1009 09:51:46.437733 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod841dcb8a_a7a6_4d7e_9558_c8708209cd33.slice/crio-ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f.scope WatchSource:0}: Error finding container ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f: Status 404 returned error can't find the container with id ca7bf1294518026d25721aa438136a1e4dd3a7e9683cce15862782a82ddc203f Oct 09 09:51:46 crc kubenswrapper[4710]: W1009 09:51:46.438050 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod841dcb8a_a7a6_4d7e_9558_c8708209cd33.slice/crio-3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757.scope WatchSource:0}: Error finding container 3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757: Status 404 returned error can't find the container with id 3f0747e8987810d11fa58627905b921b5decdb44dce5c366d7adac010728b757 Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.841091 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" path="/var/lib/kubelet/pods/e4ee793c-672c-48e9-ae90-5741b230c701/volumes" Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.916297 4710 generic.go:334] "Generic (PLEG): container finished" podID="a092a4bc-968b-408e-a835-0180661e24e6" containerID="8c457c956c70edbd7efe99b90e4dd821617879f24090251a2db3e16d2faf751d" exitCode=137 Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.916580 4710 generic.go:334] "Generic (PLEG): container finished" podID="a092a4bc-968b-408e-a835-0180661e24e6" containerID="709a0e716efd3a9a42ebb0fc33cc847a7fb581fe785592d69d215556d9df5058" exitCode=137 Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.916625 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerDied","Data":"8c457c956c70edbd7efe99b90e4dd821617879f24090251a2db3e16d2faf751d"} Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.916656 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerDied","Data":"709a0e716efd3a9a42ebb0fc33cc847a7fb581fe785592d69d215556d9df5058"} Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.968754 4710 generic.go:334] "Generic (PLEG): container finished" podID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerID="ebbf23a6846675dfe200768beecc4f87011af3f0cd2254504b705224a5524ccf" exitCode=137 Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.968784 4710 generic.go:334] "Generic (PLEG): container finished" podID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerID="454451ac6714dbe06258d42d89e9890f2d048b42d4188e0a6bd30fc1862de852" exitCode=137 Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.968837 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerDied","Data":"ebbf23a6846675dfe200768beecc4f87011af3f0cd2254504b705224a5524ccf"} Oct 09 09:51:46 crc kubenswrapper[4710]: I1009 09:51:46.968866 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" 
event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerDied","Data":"454451ac6714dbe06258d42d89e9890f2d048b42d4188e0a6bd30fc1862de852"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.011890 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"b107b875-5e01-45ce-8702-dcc78cd19193","Type":"ContainerStarted","Data":"a1978bbf4fa8133bd0d3adf739e9dc811bcf5ad8c5ba0bccbf06eb705ee4d6b4"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.011961 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"b107b875-5e01-45ce-8702-dcc78cd19193","Type":"ContainerStarted","Data":"80db7bff355018a5ebe704017d7ee824d38562dc81d10747d74014bba8d4d529"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.013511 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033867 4710 generic.go:334] "Generic (PLEG): container finished" podID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerID="cbfec75f5679b7e131e87c217ceb79f5091c1c68b8de8f71af2f81bc1b74eb55" exitCode=0 Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033902 4710 generic.go:334] "Generic (PLEG): container finished" podID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerID="790eafc42b4544f6036f7ca62edac0dc58b89a6318dd95ba82d5755143bfcec5" exitCode=2 Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033921 4710 generic.go:334] "Generic (PLEG): container finished" podID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerID="8609a1b6561465621721783889e5843e5fa6e6aff66724c5e99f03c8c5c0bef5" exitCode=0 Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033947 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerDied","Data":"cbfec75f5679b7e131e87c217ceb79f5091c1c68b8de8f71af2f81bc1b74eb55"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033975 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerDied","Data":"790eafc42b4544f6036f7ca62edac0dc58b89a6318dd95ba82d5755143bfcec5"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.033985 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerDied","Data":"8609a1b6561465621721783889e5843e5fa6e6aff66724c5e99f03c8c5c0bef5"} Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.048706 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=3.048693357 podStartE2EDuration="3.048693357s" podCreationTimestamp="2025-10-09 09:51:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:51:47.046239159 +0000 UTC m=+2830.536347555" watchObservedRunningTime="2025-10-09 09:51:47.048693357 +0000 UTC m=+2830.538801755" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.220472 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.257576 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.276354 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data\") pod \"5643934e-9c27-457b-b0ad-5c8d5a635d26\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.276614 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs\") pod \"5643934e-9c27-457b-b0ad-5c8d5a635d26\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.276690 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key\") pod \"5643934e-9c27-457b-b0ad-5c8d5a635d26\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.276717 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wkfb\" (UniqueName: \"kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb\") pod \"5643934e-9c27-457b-b0ad-5c8d5a635d26\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.276828 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts\") pod \"5643934e-9c27-457b-b0ad-5c8d5a635d26\" (UID: \"5643934e-9c27-457b-b0ad-5c8d5a635d26\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.282617 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs" (OuterVolumeSpecName: "logs") pod "5643934e-9c27-457b-b0ad-5c8d5a635d26" (UID: "5643934e-9c27-457b-b0ad-5c8d5a635d26"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.314084 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5643934e-9c27-457b-b0ad-5c8d5a635d26" (UID: "5643934e-9c27-457b-b0ad-5c8d5a635d26"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.321340 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb" (OuterVolumeSpecName: "kube-api-access-5wkfb") pod "5643934e-9c27-457b-b0ad-5c8d5a635d26" (UID: "5643934e-9c27-457b-b0ad-5c8d5a635d26"). InnerVolumeSpecName "kube-api-access-5wkfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.322733 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data" (OuterVolumeSpecName: "config-data") pod "5643934e-9c27-457b-b0ad-5c8d5a635d26" (UID: "5643934e-9c27-457b-b0ad-5c8d5a635d26"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.342892 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts" (OuterVolumeSpecName: "scripts") pod "5643934e-9c27-457b-b0ad-5c8d5a635d26" (UID: "5643934e-9c27-457b-b0ad-5c8d5a635d26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.378335 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts\") pod \"a092a4bc-968b-408e-a835-0180661e24e6\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.378477 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgk9r\" (UniqueName: \"kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r\") pod \"a092a4bc-968b-408e-a835-0180661e24e6\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.378509 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data\") pod \"a092a4bc-968b-408e-a835-0180661e24e6\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.378614 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs\") pod \"a092a4bc-968b-408e-a835-0180661e24e6\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.378694 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key\") pod \"a092a4bc-968b-408e-a835-0180661e24e6\" (UID: \"a092a4bc-968b-408e-a835-0180661e24e6\") " Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.379264 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.379282 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5643934e-9c27-457b-b0ad-5c8d5a635d26-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.379290 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5643934e-9c27-457b-b0ad-5c8d5a635d26-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.379301 4710 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5643934e-9c27-457b-b0ad-5c8d5a635d26-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.379312 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wkfb\" (UniqueName: \"kubernetes.io/projected/5643934e-9c27-457b-b0ad-5c8d5a635d26-kube-api-access-5wkfb\") on node \"crc\" DevicePath \"\"" Oct 09 
09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.380246 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs" (OuterVolumeSpecName: "logs") pod "a092a4bc-968b-408e-a835-0180661e24e6" (UID: "a092a4bc-968b-408e-a835-0180661e24e6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.394557 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a092a4bc-968b-408e-a835-0180661e24e6" (UID: "a092a4bc-968b-408e-a835-0180661e24e6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.394718 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r" (OuterVolumeSpecName: "kube-api-access-sgk9r") pod "a092a4bc-968b-408e-a835-0180661e24e6" (UID: "a092a4bc-968b-408e-a835-0180661e24e6"). InnerVolumeSpecName "kube-api-access-sgk9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.405408 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts" (OuterVolumeSpecName: "scripts") pod "a092a4bc-968b-408e-a835-0180661e24e6" (UID: "a092a4bc-968b-408e-a835-0180661e24e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.411727 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data" (OuterVolumeSpecName: "config-data") pod "a092a4bc-968b-408e-a835-0180661e24e6" (UID: "a092a4bc-968b-408e-a835-0180661e24e6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.481040 4710 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a092a4bc-968b-408e-a835-0180661e24e6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.481069 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.481078 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgk9r\" (UniqueName: \"kubernetes.io/projected/a092a4bc-968b-408e-a835-0180661e24e6-kube-api-access-sgk9r\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.481089 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a092a4bc-968b-408e-a835-0180661e24e6-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.481097 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a092a4bc-968b-408e-a835-0180661e24e6-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.897305 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:47 crc kubenswrapper[4710]: I1009 09:51:47.936760 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.045289 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-55df4dcbd5-bmlgb" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.045280 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-55df4dcbd5-bmlgb" event={"ID":"a092a4bc-968b-408e-a835-0180661e24e6","Type":"ContainerDied","Data":"ed027f9f95d1c34c739578a40c990e2039366f1d5f89543aeb54eebe56c07fba"} Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.045366 4710 scope.go:117] "RemoveContainer" containerID="8c457c956c70edbd7efe99b90e4dd821617879f24090251a2db3e16d2faf751d" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.048967 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-598b695687-fxzw5" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.049709 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-598b695687-fxzw5" event={"ID":"5643934e-9c27-457b-b0ad-5c8d5a635d26","Type":"ContainerDied","Data":"18067ecd7f01c76c0c915baf416effdd48e6899eecb76ac8b60a6415f1981cfb"} Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.142746 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.183207 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-598b695687-fxzw5"] Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.231498 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.255471 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-55df4dcbd5-bmlgb"] Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.301508 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.482558 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7887c4559f-7d5cl" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.576754 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.577280 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="dnsmasq-dns" containerID="cri-o://71b324a27cf5c64b9578d498d815a5d83e42a31679941e92d217a2fa71181e6c" gracePeriod=10 Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.824354 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" path="/var/lib/kubelet/pods/5643934e-9c27-457b-b0ad-5c8d5a635d26/volumes" Oct 09 09:51:48 crc kubenswrapper[4710]: I1009 09:51:48.825156 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a092a4bc-968b-408e-a835-0180661e24e6" path="/var/lib/kubelet/pods/a092a4bc-968b-408e-a835-0180661e24e6/volumes" Oct 09 09:51:49 crc kubenswrapper[4710]: I1009 09:51:49.076933 4710 generic.go:334] "Generic (PLEG): container finished" podID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerID="71b324a27cf5c64b9578d498d815a5d83e42a31679941e92d217a2fa71181e6c" exitCode=0 Oct 09 09:51:49 crc kubenswrapper[4710]: I1009 09:51:49.077216 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" event={"ID":"8485f386-dd23-4d8c-89de-dcb805d8d745","Type":"ContainerDied","Data":"71b324a27cf5c64b9578d498d815a5d83e42a31679941e92d217a2fa71181e6c"} Oct 09 09:51:49 crc kubenswrapper[4710]: I1009 09:51:49.766677 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.194:5353: connect: connection refused" Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.056357 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-67d94d7dc8-fvmp7" Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.135249 4710 generic.go:334] 
"Generic (PLEG): container finished" podID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerID="9193356d85269ddeb233452187c77556505b3e97645cc1c6b911a04df1eb8f90" exitCode=0 Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.138485 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerDied","Data":"9193356d85269ddeb233452187c77556505b3e97645cc1c6b911a04df1eb8f90"} Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.146200 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.146562 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon-log" containerID="cri-o://d53c16c67c5b0020f557c562195ccfa873e3b2e20bbf3ea62771bea097dc7b4a" gracePeriod=30 Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.147009 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" containerID="cri-o://07262d3590ed932bf374182e51c7e52eb519a990dfa2d33d6f54d6053d4c7dc2" gracePeriod=30 Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.179760 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": EOF" Oct 09 09:51:50 crc kubenswrapper[4710]: I1009 09:51:50.853360 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.186:3000/\": dial tcp 10.217.0.186:3000: connect: connection refused" Oct 09 09:51:51 crc kubenswrapper[4710]: I1009 09:51:51.693751 4710 scope.go:117] "RemoveContainer" containerID="709a0e716efd3a9a42ebb0fc33cc847a7fb581fe785592d69d215556d9df5058" Oct 09 09:51:51 crc kubenswrapper[4710]: I1009 09:51:51.812900 4710 scope.go:117] "RemoveContainer" containerID="ebbf23a6846675dfe200768beecc4f87011af3f0cd2254504b705224a5524ccf" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.082650 4710 scope.go:117] "RemoveContainer" containerID="454451ac6714dbe06258d42d89e9890f2d048b42d4188e0a6bd30fc1862de852" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.205488 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" event={"ID":"8485f386-dd23-4d8c-89de-dcb805d8d745","Type":"ContainerDied","Data":"4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55"} Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.205534 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4200b97673241f39cb15581e29b919cd075777aaf3e1fc1113bc21273d62dd55" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.211813 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f8f55b62-b62b-4571-bee9-8af830b6f8f0","Type":"ContainerDied","Data":"c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0"} Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.211853 4710 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c1abd86898e30038637145051beef46d3f3f67394742e233b1282552313463a0" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.232827 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.240723 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377373 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377456 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377523 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377543 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377582 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377634 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam\") pod \"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377747 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7qng\" (UniqueName: \"kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng\") pod \"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377776 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377814 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config\") pod 
\"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377927 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb\") pod \"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.377980 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb\") pod \"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.378053 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bxbx\" (UniqueName: \"kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.378085 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc\") pod \"8485f386-dd23-4d8c-89de-dcb805d8d745\" (UID: \"8485f386-dd23-4d8c-89de-dcb805d8d745\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.378175 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd\") pod \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\" (UID: \"f8f55b62-b62b-4571-bee9-8af830b6f8f0\") " Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.379670 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.384380 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.405577 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng" (OuterVolumeSpecName: "kube-api-access-g7qng") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "kube-api-access-g7qng". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.409387 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx" (OuterVolumeSpecName: "kube-api-access-8bxbx") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). 
InnerVolumeSpecName "kube-api-access-8bxbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.415005 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts" (OuterVolumeSpecName: "scripts") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492158 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config" (OuterVolumeSpecName: "config") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492794 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492821 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492835 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7qng\" (UniqueName: \"kubernetes.io/projected/8485f386-dd23-4d8c-89de-dcb805d8d745-kube-api-access-g7qng\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492845 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f8f55b62-b62b-4571-bee9-8af830b6f8f0-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492854 4710 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-config\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.492863 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bxbx\" (UniqueName: \"kubernetes.io/projected/f8f55b62-b62b-4571-bee9-8af830b6f8f0-kube-api-access-8bxbx\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.512524 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.520815 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.532128 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.575376 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.576734 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.590822 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "8485f386-dd23-4d8c-89de-dcb805d8d745" (UID: "8485f386-dd23-4d8c-89de-dcb805d8d745"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596016 4710 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596045 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596057 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596070 4710 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596082 4710 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485f386-dd23-4d8c-89de-dcb805d8d745-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.596092 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.604853 4710 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.634591 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data" (OuterVolumeSpecName: "config-data") pod "f8f55b62-b62b-4571-bee9-8af830b6f8f0" (UID: "f8f55b62-b62b-4571-bee9-8af830b6f8f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.697878 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:52 crc kubenswrapper[4710]: I1009 09:51:52.697905 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8f55b62-b62b-4571-bee9-8af830b6f8f0-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.225491 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerStarted","Data":"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2"} Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.225922 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerStarted","Data":"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b"} Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.233997 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.234044 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-867c8fd5c5-lmpr2" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.254685 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.336866554 podStartE2EDuration="16.25466948s" podCreationTimestamp="2025-10-09 09:51:37 +0000 UTC" firstStartedPulling="2025-10-09 09:51:39.17424799 +0000 UTC m=+2822.664356387" lastFinishedPulling="2025-10-09 09:51:52.092050916 +0000 UTC m=+2835.582159313" observedRunningTime="2025-10-09 09:51:53.25043955 +0000 UTC m=+2836.740547948" watchObservedRunningTime="2025-10-09 09:51:53.25466948 +0000 UTC m=+2836.744777877" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.274771 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.286594 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-867c8fd5c5-lmpr2"] Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.293088 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.300904 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.307603 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308054 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308117 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308171 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308231 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308285 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308335 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308395 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-notification-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308463 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-notification-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308525 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="extract-content" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308586 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="extract-content" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308634 4710 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="extract-utilities" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308681 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="extract-utilities" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308736 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-central-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308777 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-central-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308824 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="init" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308864 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="init" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.308906 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="dnsmasq-dns" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.308959 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="dnsmasq-dns" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.309028 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="registry-server" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309071 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="registry-server" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.309116 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="sg-core" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309157 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="sg-core" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.309200 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="proxy-httpd" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309241 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="proxy-httpd" Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.309283 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309328 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309550 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-central-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.309623 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" containerName="dnsmasq-dns" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.312951 4710 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="sg-core" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313040 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313093 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4ee793c-672c-48e9-ae90-5741b230c701" containerName="registry-server" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313161 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313221 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="ceilometer-notification-agent" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313270 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="a092a4bc-968b-408e-a835-0180661e24e6" containerName="horizon" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313330 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5643934e-9c27-457b-b0ad-5c8d5a635d26" containerName="horizon-log" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.313375 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" containerName="proxy-httpd" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.315004 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.317927 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.318115 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.318230 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.324923 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.418843 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.418929 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.419215 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.419286 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.419561 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.419720 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.419837 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.420132 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7cp5\" (UniqueName: \"kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.424251 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:53 crc kubenswrapper[4710]: E1009 09:51:53.427892 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-w7cp5 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="63d8b24c-4d8f-479f-bb8a-16af498482c5" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521415 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521486 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521512 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521550 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7cp5\" (UniqueName: 
\"kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521586 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521615 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521633 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.521650 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.522475 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.522565 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.527550 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.529037 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.529896 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.530294 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.533328 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.539583 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7cp5\" (UniqueName: \"kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5\") pod \"ceilometer-0\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " pod="openstack/ceilometer-0" Oct 09 09:51:53 crc kubenswrapper[4710]: I1009 09:51:53.612639 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:37694->10.217.0.242:8443: read: connection reset by peer" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.248943 4710 generic.go:334] "Generic (PLEG): container finished" podID="c574f93a-f78c-43c0-a159-797171f39d0b" containerID="07262d3590ed932bf374182e51c7e52eb519a990dfa2d33d6f54d6053d4c7dc2" exitCode=0 Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.249023 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerDied","Data":"07262d3590ed932bf374182e51c7e52eb519a990dfa2d33d6f54d6053d4c7dc2"} Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.251507 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.261197 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.279254 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.242:8443: connect: connection refused" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.443937 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444035 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7cp5\" (UniqueName: \"kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444177 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444252 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444286 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444352 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444483 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.444533 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd\") pod \"63d8b24c-4d8f-479f-bb8a-16af498482c5\" (UID: \"63d8b24c-4d8f-479f-bb8a-16af498482c5\") " Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.445978 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" 
(UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.449905 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.453417 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts" (OuterVolumeSpecName: "scripts") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.454840 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5" (OuterVolumeSpecName: "kube-api-access-w7cp5") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "kube-api-access-w7cp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.455031 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.460239 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.470303 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data" (OuterVolumeSpecName: "config-data") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.484661 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "63d8b24c-4d8f-479f-bb8a-16af498482c5" (UID: "63d8b24c-4d8f-479f-bb8a-16af498482c5"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.547982 4710 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548020 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548030 4710 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548044 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548052 4710 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d8b24c-4d8f-479f-bb8a-16af498482c5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548060 4710 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548071 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7cp5\" (UniqueName: \"kubernetes.io/projected/63d8b24c-4d8f-479f-bb8a-16af498482c5-kube-api-access-w7cp5\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.548080 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d8b24c-4d8f-479f-bb8a-16af498482c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.826996 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8485f386-dd23-4d8c-89de-dcb805d8d745" path="/var/lib/kubelet/pods/8485f386-dd23-4d8c-89de-dcb805d8d745/volumes" Oct 09 09:51:54 crc kubenswrapper[4710]: I1009 09:51:54.828185 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8f55b62-b62b-4571-bee9-8af830b6f8f0" path="/var/lib/kubelet/pods/f8f55b62-b62b-4571-bee9-8af830b6f8f0/volumes" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.257382 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.318679 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.318727 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.344134 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.346338 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.349131 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.354737 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.354873 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.365791 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376243 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-scripts\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376476 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376607 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkcvk\" (UniqueName: \"kubernetes.io/projected/c38d2c59-6c38-4516-bc9c-793e554b04c6-kube-api-access-vkcvk\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376695 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-run-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376779 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.376891 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.377013 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-config-data\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.377077 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-log-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.479749 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-scripts\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.479853 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.479936 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkcvk\" (UniqueName: \"kubernetes.io/projected/c38d2c59-6c38-4516-bc9c-793e554b04c6-kube-api-access-vkcvk\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480543 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-run-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480614 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480706 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480825 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-config-data\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480849 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-log-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.480909 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-run-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.481350 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/c38d2c59-6c38-4516-bc9c-793e554b04c6-log-httpd\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.485082 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-scripts\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.485376 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-config-data\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.485973 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.492706 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.498954 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c38d2c59-6c38-4516-bc9c-793e554b04c6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.499995 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkcvk\" (UniqueName: \"kubernetes.io/projected/c38d2c59-6c38-4516-bc9c-793e554b04c6-kube-api-access-vkcvk\") pod \"ceilometer-0\" (UID: \"c38d2c59-6c38-4516-bc9c-793e554b04c6\") " pod="openstack/ceilometer-0" Oct 09 09:51:55 crc kubenswrapper[4710]: I1009 09:51:55.674661 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 09 09:51:56 crc kubenswrapper[4710]: I1009 09:51:56.259900 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 09 09:51:56 crc kubenswrapper[4710]: I1009 09:51:56.827318 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63d8b24c-4d8f-479f-bb8a-16af498482c5" path="/var/lib/kubelet/pods/63d8b24c-4d8f-479f-bb8a-16af498482c5/volumes" Oct 09 09:51:57 crc kubenswrapper[4710]: I1009 09:51:57.279645 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c38d2c59-6c38-4516-bc9c-793e554b04c6","Type":"ContainerStarted","Data":"c7c8798c4adee0f30514c6c9c4954fb0dcb4fde2f3d639e6274bb9c146c815e9"} Oct 09 09:51:57 crc kubenswrapper[4710]: I1009 09:51:57.280063 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c38d2c59-6c38-4516-bc9c-793e554b04c6","Type":"ContainerStarted","Data":"431d0dc8ee6856ed069eb5d3c7cf2c54a9eb0a9590a59a2e14f6098c7979fecf"} Oct 09 09:51:58 crc kubenswrapper[4710]: I1009 09:51:58.289814 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c38d2c59-6c38-4516-bc9c-793e554b04c6","Type":"ContainerStarted","Data":"757dceeb85d51999cb81546ef5fa8ed933b788d06ed7cf0c4d9e851f7f76b712"} Oct 09 09:51:58 crc kubenswrapper[4710]: I1009 09:51:58.312120 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 09 09:51:59 crc kubenswrapper[4710]: I1009 09:51:59.302298 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c38d2c59-6c38-4516-bc9c-793e554b04c6","Type":"ContainerStarted","Data":"03b3e7070943b3665c2d3f6ec27364804267d3327fd88cc937f816b2d13ac4d6"} Oct 09 09:51:59 crc kubenswrapper[4710]: I1009 09:51:59.721380 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 09 09:51:59 crc kubenswrapper[4710]: I1009 09:51:59.771490 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:00 crc kubenswrapper[4710]: I1009 09:52:00.314964 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="manila-scheduler" containerID="cri-o://4baefd534c0781d8369b767c8a18d1b5e8dd219aab9d69b6be3517aa15a8133a" gracePeriod=30 Oct 09 09:52:00 crc kubenswrapper[4710]: I1009 09:52:00.315499 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-scheduler-0" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="probe" containerID="cri-o://8c3466e6a499eed150beda0266822efc812c4011e237d49cc10bf95609c36c1b" gracePeriod=30 Oct 09 09:52:01 crc kubenswrapper[4710]: I1009 09:52:01.327478 4710 generic.go:334] "Generic (PLEG): container finished" podID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerID="8c3466e6a499eed150beda0266822efc812c4011e237d49cc10bf95609c36c1b" exitCode=0 Oct 09 09:52:01 crc kubenswrapper[4710]: I1009 09:52:01.327605 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerDied","Data":"8c3466e6a499eed150beda0266822efc812c4011e237d49cc10bf95609c36c1b"} Oct 09 09:52:01 crc kubenswrapper[4710]: I1009 09:52:01.331076 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"c38d2c59-6c38-4516-bc9c-793e554b04c6","Type":"ContainerStarted","Data":"eb766c9b4180c5fa1511041630925b057cc003985503d1615e2b2f421d41b573"} Oct 09 09:52:01 crc kubenswrapper[4710]: I1009 09:52:01.331247 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 09 09:52:01 crc kubenswrapper[4710]: I1009 09:52:01.357271 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.505100164 podStartE2EDuration="6.357252277s" podCreationTimestamp="2025-10-09 09:51:55 +0000 UTC" firstStartedPulling="2025-10-09 09:51:56.264375962 +0000 UTC m=+2839.754484359" lastFinishedPulling="2025-10-09 09:52:00.116528075 +0000 UTC m=+2843.606636472" observedRunningTime="2025-10-09 09:52:01.348031244 +0000 UTC m=+2844.838139641" watchObservedRunningTime="2025-10-09 09:52:01.357252277 +0000 UTC m=+2844.847360674" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.352903 4710 generic.go:334] "Generic (PLEG): container finished" podID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerID="4baefd534c0781d8369b767c8a18d1b5e8dd219aab9d69b6be3517aa15a8133a" exitCode=0 Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.353313 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerDied","Data":"4baefd534c0781d8369b767c8a18d1b5e8dd219aab9d69b6be3517aa15a8133a"} Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.758156 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768653 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768706 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768790 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768787 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768912 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj8dp\" (UniqueName: \"kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.768978 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.769066 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data\") pod \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\" (UID: \"233de1cb-6266-4ce6-934b-45bd5ce38b8a\") " Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.770620 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/233de1cb-6266-4ce6-934b-45bd5ce38b8a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.780980 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts" (OuterVolumeSpecName: "scripts") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.781284 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.781486 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp" (OuterVolumeSpecName: "kube-api-access-mj8dp") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "kube-api-access-mj8dp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.851336 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.872962 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj8dp\" (UniqueName: \"kubernetes.io/projected/233de1cb-6266-4ce6-934b-45bd5ce38b8a-kube-api-access-mj8dp\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.872990 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.872998 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.873006 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.892154 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data" (OuterVolumeSpecName: "config-data") pod "233de1cb-6266-4ce6-934b-45bd5ce38b8a" (UID: "233de1cb-6266-4ce6-934b-45bd5ce38b8a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:03 crc kubenswrapper[4710]: I1009 09:52:03.973695 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/233de1cb-6266-4ce6-934b-45bd5ce38b8a-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.279283 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.242:8443: connect: connection refused" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.365603 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"233de1cb-6266-4ce6-934b-45bd5ce38b8a","Type":"ContainerDied","Data":"b87a7e7d867ce262f0ce13b4bf518bd06c6400b1757b67bc29216f1a077fe40a"} Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.365952 4710 scope.go:117] "RemoveContainer" containerID="8c3466e6a499eed150beda0266822efc812c4011e237d49cc10bf95609c36c1b" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.365660 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.393837 4710 scope.go:117] "RemoveContainer" containerID="4baefd534c0781d8369b767c8a18d1b5e8dd219aab9d69b6be3517aa15a8133a" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.396781 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.414586 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.423995 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:04 crc kubenswrapper[4710]: E1009 09:52:04.424502 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="manila-scheduler" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.424524 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="manila-scheduler" Oct 09 09:52:04 crc kubenswrapper[4710]: E1009 09:52:04.424532 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="probe" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.424539 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="probe" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.424763 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="manila-scheduler" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.424796 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" containerName="probe" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.425800 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.430897 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.464313 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.481616 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-scripts\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.481742 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.481910 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.482040 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.482140 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f91da5bb-5573-4165-8592-6dc828d64596-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.482212 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjhnh\" (UniqueName: \"kubernetes.io/projected/f91da5bb-5573-4165-8592-6dc828d64596-kube-api-access-mjhnh\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584523 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-scripts\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584591 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584660 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584729 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584777 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f91da5bb-5573-4165-8592-6dc828d64596-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.584810 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjhnh\" (UniqueName: \"kubernetes.io/projected/f91da5bb-5573-4165-8592-6dc828d64596-kube-api-access-mjhnh\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.585393 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f91da5bb-5573-4165-8592-6dc828d64596-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.593205 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-scripts\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.593540 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.594887 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.595080 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f91da5bb-5573-4165-8592-6dc828d64596-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.604644 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjhnh\" (UniqueName: \"kubernetes.io/projected/f91da5bb-5573-4165-8592-6dc828d64596-kube-api-access-mjhnh\") pod \"manila-scheduler-0\" (UID: \"f91da5bb-5573-4165-8592-6dc828d64596\") " pod="openstack/manila-scheduler-0" Oct 09 09:52:04 
crc kubenswrapper[4710]: I1009 09:52:04.749204 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-scheduler-0" Oct 09 09:52:04 crc kubenswrapper[4710]: I1009 09:52:04.831536 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="233de1cb-6266-4ce6-934b-45bd5ce38b8a" path="/var/lib/kubelet/pods/233de1cb-6266-4ce6-934b-45bd5ce38b8a/volumes" Oct 09 09:52:05 crc kubenswrapper[4710]: I1009 09:52:05.332591 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Oct 09 09:52:05 crc kubenswrapper[4710]: W1009 09:52:05.340117 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf91da5bb_5573_4165_8592_6dc828d64596.slice/crio-5bd415309abb2d1dfe6c887913c1239ce82e06fdb4e35494037770a91283da90 WatchSource:0}: Error finding container 5bd415309abb2d1dfe6c887913c1239ce82e06fdb4e35494037770a91283da90: Status 404 returned error can't find the container with id 5bd415309abb2d1dfe6c887913c1239ce82e06fdb4e35494037770a91283da90 Oct 09 09:52:05 crc kubenswrapper[4710]: I1009 09:52:05.375101 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f91da5bb-5573-4165-8592-6dc828d64596","Type":"ContainerStarted","Data":"5bd415309abb2d1dfe6c887913c1239ce82e06fdb4e35494037770a91283da90"} Oct 09 09:52:05 crc kubenswrapper[4710]: I1009 09:52:05.546156 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:52:05 crc kubenswrapper[4710]: I1009 09:52:05.546202 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:52:06 crc kubenswrapper[4710]: I1009 09:52:06.216887 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Oct 09 09:52:06 crc kubenswrapper[4710]: I1009 09:52:06.391499 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f91da5bb-5573-4165-8592-6dc828d64596","Type":"ContainerStarted","Data":"7c285b6b5384f202148903e2a97ab843fd8abe64341d47dbad1aac694fe91588"} Oct 09 09:52:06 crc kubenswrapper[4710]: I1009 09:52:06.391546 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"f91da5bb-5573-4165-8592-6dc828d64596","Type":"ContainerStarted","Data":"f885c200e0dcff10d81929274258ffe3fb5251716384a48cddbb1f10a475fd0b"} Oct 09 09:52:06 crc kubenswrapper[4710]: I1009 09:52:06.421500 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=2.421486294 podStartE2EDuration="2.421486294s" podCreationTimestamp="2025-10-09 09:52:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:52:06.414690776 +0000 UTC m=+2849.904799173" watchObservedRunningTime="2025-10-09 09:52:06.421486294 +0000 UTC m=+2849.911594692" Oct 09 09:52:10 crc kubenswrapper[4710]: I1009 09:52:10.000287 
4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 09 09:52:10 crc kubenswrapper[4710]: I1009 09:52:10.056812 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:10 crc kubenswrapper[4710]: I1009 09:52:10.432878 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="probe" containerID="cri-o://9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" gracePeriod=30 Oct 09 09:52:10 crc kubenswrapper[4710]: I1009 09:52:10.433374 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/manila-share-share1-0" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="manila-share" containerID="cri-o://6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" gracePeriod=30 Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.326183 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.384107 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb8bm\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.384736 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.384787 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.384813 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.385655 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.385752 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.390339 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm" (OuterVolumeSpecName: "kube-api-access-xb8bm") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "kube-api-access-xb8bm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.392274 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.392315 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.392825 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.392554 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila\") pod \"169f03f2-5865-4b76-8042-48c843181f71\" (UID: \"169f03f2-5865-4b76-8042-48c843181f71\") " Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.394147 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb8bm\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-kube-api-access-xb8bm\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.394169 4710 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.394178 4710 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.394689 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts" (OuterVolumeSpecName: "scripts") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.396896 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph" (OuterVolumeSpecName: "ceph") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.405332 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444251 4710 generic.go:334] "Generic (PLEG): container finished" podID="169f03f2-5865-4b76-8042-48c843181f71" containerID="9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" exitCode=0 Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444280 4710 generic.go:334] "Generic (PLEG): container finished" podID="169f03f2-5865-4b76-8042-48c843181f71" containerID="6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" exitCode=1 Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444305 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerDied","Data":"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2"} Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444336 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerDied","Data":"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b"} Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444345 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"169f03f2-5865-4b76-8042-48c843181f71","Type":"ContainerDied","Data":"967eb772fcb7d5b303d6a045adc43f380654dda07cc49dba2a542a6c9d45a55f"} Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444363 4710 scope.go:117] "RemoveContainer" containerID="9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.444518 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.449052 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.467260 4710 scope.go:117] "RemoveContainer" containerID="6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.479830 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data" (OuterVolumeSpecName: "config-data") pod "169f03f2-5865-4b76-8042-48c843181f71" (UID: "169f03f2-5865-4b76-8042-48c843181f71"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.496657 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.496679 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.496689 4710 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/169f03f2-5865-4b76-8042-48c843181f71-ceph\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.496700 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169f03f2-5865-4b76-8042-48c843181f71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.496710 4710 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/169f03f2-5865-4b76-8042-48c843181f71-var-lib-manila\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.503000 4710 scope.go:117] "RemoveContainer" containerID="9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" Oct 09 09:52:11 crc kubenswrapper[4710]: E1009 09:52:11.505397 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2\": container with ID starting with 9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2 not found: ID does not exist" containerID="9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.505453 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2"} err="failed to get container status \"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2\": rpc error: code = NotFound desc = could not find container \"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2\": container with ID starting with 9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2 not found: ID does not exist" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.505478 4710 scope.go:117] "RemoveContainer" containerID="6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" Oct 09 09:52:11 crc kubenswrapper[4710]: E1009 09:52:11.506134 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b\": container with ID starting with 6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b not found: ID does not exist" containerID="6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.506165 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b"} err="failed to get container status \"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b\": rpc error: code = NotFound desc = could not find container \"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b\": container with ID starting with 6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b not found: ID does not exist" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.506183 4710 scope.go:117] "RemoveContainer" containerID="9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.506543 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2"} err="failed to get container status \"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2\": rpc error: code = NotFound desc = could not find container \"9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2\": container with ID starting with 9150375894f4563c7c8e2249c5dd41ee292c5f6416c8d96788a2fa030a6b33d2 not found: ID does not exist" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.506585 4710 scope.go:117] "RemoveContainer" containerID="6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.506897 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b"} err="failed to get container status \"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b\": rpc error: code = NotFound desc = could not find container \"6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b\": container with ID starting with 6cdfc9421f277fe9a42155df1ec4282fadca2df6a4d595c66f3c74d19f61d01b not found: ID does not exist" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.788865 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.833770 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.865698 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:11 crc kubenswrapper[4710]: E1009 09:52:11.866090 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="manila-share" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.866108 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="manila-share" Oct 09 09:52:11 crc kubenswrapper[4710]: E1009 09:52:11.866137 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="probe" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.866144 4710 
state_mem.go:107] "Deleted CPUSet assignment" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="probe" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.866329 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="probe" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.866359 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="169f03f2-5865-4b76-8042-48c843181f71" containerName="manila-share" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.867605 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.869746 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Oct 09 09:52:11 crc kubenswrapper[4710]: I1009 09:52:11.875623 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.017786 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-scripts\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018228 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-ceph\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018558 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018706 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt9fm\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-kube-api-access-wt9fm\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018754 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018781 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018824 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.018854 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122328 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122465 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt9fm\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-kube-api-access-wt9fm\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122494 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122496 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122515 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122614 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122658 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122739 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-scripts\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " 
pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.122894 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/3efa48f2-2c46-4963-aed1-aa57cb9ada01-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.123034 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-ceph\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.129878 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-scripts\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.129958 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-ceph\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.130173 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.130282 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.137002 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3efa48f2-2c46-4963-aed1-aa57cb9ada01-config-data\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.138499 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt9fm\" (UniqueName: \"kubernetes.io/projected/3efa48f2-2c46-4963-aed1-aa57cb9ada01-kube-api-access-wt9fm\") pod \"manila-share-share1-0\" (UID: \"3efa48f2-2c46-4963-aed1-aa57cb9ada01\") " pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.185264 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-share-share1-0" Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.721611 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Oct 09 09:52:12 crc kubenswrapper[4710]: I1009 09:52:12.830728 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169f03f2-5865-4b76-8042-48c843181f71" path="/var/lib/kubelet/pods/169f03f2-5865-4b76-8042-48c843181f71/volumes" Oct 09 09:52:13 crc kubenswrapper[4710]: I1009 09:52:13.465351 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3efa48f2-2c46-4963-aed1-aa57cb9ada01","Type":"ContainerStarted","Data":"3ee5f57d43d91a9d766bdb008505583357f03225fc4c0a7b43b93314fe71f32e"} Oct 09 09:52:13 crc kubenswrapper[4710]: I1009 09:52:13.465677 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3efa48f2-2c46-4963-aed1-aa57cb9ada01","Type":"ContainerStarted","Data":"61523820a7439e284419a82e6fa111eb002aef124264525cbb77640053a3f3fd"} Oct 09 09:52:14 crc kubenswrapper[4710]: I1009 09:52:14.278979 4710 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-dd45d474-nc99p" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.242:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.242:8443: connect: connection refused" Oct 09 09:52:14 crc kubenswrapper[4710]: I1009 09:52:14.478070 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"3efa48f2-2c46-4963-aed1-aa57cb9ada01","Type":"ContainerStarted","Data":"fafa1d801a973a6f82331c3472ac523d5fc50e587d40628516317e106ff8fae9"} Oct 09 09:52:14 crc kubenswrapper[4710]: I1009 09:52:14.501099 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=3.50108162 podStartE2EDuration="3.50108162s" podCreationTimestamp="2025-10-09 09:52:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 09:52:14.498159799 +0000 UTC m=+2857.988268196" watchObservedRunningTime="2025-10-09 09:52:14.50108162 +0000 UTC m=+2857.991190018" Oct 09 09:52:14 crc kubenswrapper[4710]: I1009 09:52:14.750279 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.541492 4710 generic.go:334] "Generic (PLEG): container finished" podID="c574f93a-f78c-43c0-a159-797171f39d0b" containerID="d53c16c67c5b0020f557c562195ccfa873e3b2e20bbf3ea62771bea097dc7b4a" exitCode=137 Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.541580 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerDied","Data":"d53c16c67c5b0020f557c562195ccfa873e3b2e20bbf3ea62771bea097dc7b4a"} Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.626720 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.738670 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.738743 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.738897 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.739294 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs" (OuterVolumeSpecName: "logs") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.739796 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.739904 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzhcs\" (UniqueName: \"kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.740108 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.740206 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle\") pod \"c574f93a-f78c-43c0-a159-797171f39d0b\" (UID: \"c574f93a-f78c-43c0-a159-797171f39d0b\") " Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.741559 4710 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c574f93a-f78c-43c0-a159-797171f39d0b-logs\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.749799 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). 
InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.749853 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs" (OuterVolumeSpecName: "kube-api-access-fzhcs") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "kube-api-access-fzhcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.774007 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts" (OuterVolumeSpecName: "scripts") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.774076 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data" (OuterVolumeSpecName: "config-data") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.774877 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.785961 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "c574f93a-f78c-43c0-a159-797171f39d0b" (UID: "c574f93a-f78c-43c0-a159-797171f39d0b"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843440 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843639 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843653 4710 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c574f93a-f78c-43c0-a159-797171f39d0b-scripts\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843662 4710 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843671 4710 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c574f93a-f78c-43c0-a159-797171f39d0b-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:20 crc kubenswrapper[4710]: I1009 09:52:20.843680 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzhcs\" (UniqueName: \"kubernetes.io/projected/c574f93a-f78c-43c0-a159-797171f39d0b-kube-api-access-fzhcs\") on node \"crc\" DevicePath \"\"" Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.555961 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd45d474-nc99p" event={"ID":"c574f93a-f78c-43c0-a159-797171f39d0b","Type":"ContainerDied","Data":"2259e5a0358432dc8d51e0dbf4dfc963f3c923b648aa863b1b276710a285e3b1"} Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.556036 4710 scope.go:117] "RemoveContainer" containerID="07262d3590ed932bf374182e51c7e52eb519a990dfa2d33d6f54d6053d4c7dc2" Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.556076 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dd45d474-nc99p" Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.592242 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.605799 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-dd45d474-nc99p"] Oct 09 09:52:21 crc kubenswrapper[4710]: I1009 09:52:21.729722 4710 scope.go:117] "RemoveContainer" containerID="d53c16c67c5b0020f557c562195ccfa873e3b2e20bbf3ea62771bea097dc7b4a" Oct 09 09:52:22 crc kubenswrapper[4710]: I1009 09:52:22.186226 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Oct 09 09:52:22 crc kubenswrapper[4710]: I1009 09:52:22.826356 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" path="/var/lib/kubelet/pods/c574f93a-f78c-43c0-a159-797171f39d0b/volumes" Oct 09 09:52:25 crc kubenswrapper[4710]: I1009 09:52:25.686732 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 09 09:52:26 crc kubenswrapper[4710]: I1009 09:52:26.101986 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Oct 09 09:52:33 crc kubenswrapper[4710]: I1009 09:52:33.614947 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0" Oct 09 09:52:35 crc kubenswrapper[4710]: I1009 09:52:35.545732 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:52:35 crc kubenswrapper[4710]: I1009 09:52:35.546824 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.148388 4710 scope.go:117] "RemoveContainer" containerID="790eafc42b4544f6036f7ca62edac0dc58b89a6318dd95ba82d5755143bfcec5" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.173577 4710 scope.go:117] "RemoveContainer" containerID="71b324a27cf5c64b9578d498d815a5d83e42a31679941e92d217a2fa71181e6c" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.207285 4710 scope.go:117] "RemoveContainer" containerID="9193356d85269ddeb233452187c77556505b3e97645cc1c6b911a04df1eb8f90" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.225412 4710 scope.go:117] "RemoveContainer" containerID="8609a1b6561465621721783889e5843e5fa6e6aff66724c5e99f03c8c5c0bef5" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.246329 4710 scope.go:117] "RemoveContainer" containerID="cbfec75f5679b7e131e87c217ceb79f5091c1c68b8de8f71af2f81bc1b74eb55" Oct 09 09:52:52 crc kubenswrapper[4710]: I1009 09:52:52.262523 4710 scope.go:117] "RemoveContainer" containerID="8ee7a0d86d9a6017b8ebd6c9f0e7b7c0a7869a4169cb812a52ea78a01e8d964e" Oct 09 09:53:05 crc kubenswrapper[4710]: I1009 09:53:05.545774 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 09:53:05 crc kubenswrapper[4710]: I1009 09:53:05.546313 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 09:53:05 crc kubenswrapper[4710]: I1009 09:53:05.546360 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 09:53:05 crc kubenswrapper[4710]: I1009 09:53:05.546936 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 09:53:05 crc kubenswrapper[4710]: I1009 09:53:05.547006 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" gracePeriod=600 Oct 09 09:53:05 crc kubenswrapper[4710]: E1009 09:53:05.679005 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:53:06 crc kubenswrapper[4710]: I1009 09:53:06.030159 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" exitCode=0 Oct 09 09:53:06 crc kubenswrapper[4710]: I1009 09:53:06.030226 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51"} Oct 09 09:53:06 crc kubenswrapper[4710]: I1009 09:53:06.030288 4710 scope.go:117] "RemoveContainer" containerID="053058b473714540c8ffd33c2543c1558912e10c236f0b0629e0d2c8432bc8ef" Oct 09 09:53:06 crc kubenswrapper[4710]: I1009 09:53:06.031171 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:53:06 crc kubenswrapper[4710]: E1009 09:53:06.031416 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:53:18 crc kubenswrapper[4710]: I1009 09:53:18.815874 
4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:53:18 crc kubenswrapper[4710]: E1009 09:53:18.817773 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.362295 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Oct 09 09:53:29 crc kubenswrapper[4710]: E1009 09:53:29.363674 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.363689 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" Oct 09 09:53:29 crc kubenswrapper[4710]: E1009 09:53:29.363707 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon-log" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.363713 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon-log" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.364732 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon-log" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.364778 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="c574f93a-f78c-43c0-a159-797171f39d0b" containerName="horizon" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.365962 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.369643 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-b95j2" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.369810 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.370006 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.370290 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.397514 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.464844 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.465062 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.465160 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568130 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568190 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568454 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568503 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl2qf\" (UniqueName: \"kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf\") pod 
\"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568632 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568670 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568698 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568744 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.568767 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.569291 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.570078 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.577026 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.670586 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: 
\"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.671132 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.671534 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.672154 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.671463 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.671066 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.672666 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.672703 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.672719 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl2qf\" (UniqueName: \"kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.676706 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 
09:53:29.678152 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.689345 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl2qf\" (UniqueName: \"kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.697897 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " pod="openstack/tempest-tests-tempest" Oct 09 09:53:29 crc kubenswrapper[4710]: I1009 09:53:29.988666 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 09 09:53:30 crc kubenswrapper[4710]: I1009 09:53:30.404593 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Oct 09 09:53:30 crc kubenswrapper[4710]: I1009 09:53:30.427015 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:53:31 crc kubenswrapper[4710]: I1009 09:53:31.300989 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36f8a9d9-4342-4b4f-921b-a0acbe1215db","Type":"ContainerStarted","Data":"4ba79cc181887cef054997939a32875dc72c401dd49c7a5418a3c67af600a2e1"} Oct 09 09:53:33 crc kubenswrapper[4710]: I1009 09:53:33.815422 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:53:33 crc kubenswrapper[4710]: E1009 09:53:33.816068 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:53:48 crc kubenswrapper[4710]: I1009 09:53:48.816067 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:53:48 crc kubenswrapper[4710]: E1009 09:53:48.816873 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:53:59 crc kubenswrapper[4710]: I1009 09:53:59.815875 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:53:59 crc kubenswrapper[4710]: E1009 09:53:59.816857 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:54:06 crc kubenswrapper[4710]: E1009 09:54:06.079524 4710 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Oct 09 09:54:06 crc kubenswrapper[4710]: E1009 09:54:06.079968 4710 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cl2qf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},
},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(36f8a9d9-4342-4b4f-921b-a0acbe1215db): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 09 09:54:06 crc kubenswrapper[4710]: E1009 09:54:06.081472 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" Oct 09 09:54:06 crc kubenswrapper[4710]: E1009 09:54:06.684233 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" Oct 09 09:54:10 crc kubenswrapper[4710]: I1009 09:54:10.816390 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:54:10 crc kubenswrapper[4710]: E1009 09:54:10.817190 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:54:22 crc kubenswrapper[4710]: I1009 09:54:22.433883 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Oct 09 09:54:23 crc kubenswrapper[4710]: I1009 09:54:23.828385 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36f8a9d9-4342-4b4f-921b-a0acbe1215db","Type":"ContainerStarted","Data":"fc2e0f94f651966a3671a5af7bd702dc5271ed6626611d82ffad607f222fa3c6"} Oct 09 09:54:25 crc kubenswrapper[4710]: I1009 09:54:25.814987 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:54:25 crc kubenswrapper[4710]: E1009 09:54:25.815469 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:54:36 crc kubenswrapper[4710]: I1009 09:54:36.820741 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:54:36 crc kubenswrapper[4710]: E1009 09:54:36.821789 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:54:51 crc kubenswrapper[4710]: I1009 09:54:51.814423 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:54:51 crc kubenswrapper[4710]: E1009 09:54:51.814959 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:55:04 crc kubenswrapper[4710]: I1009 09:55:04.817022 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:55:04 crc kubenswrapper[4710]: E1009 09:55:04.817636 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:55:18 crc kubenswrapper[4710]: I1009 09:55:18.814754 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:55:18 crc kubenswrapper[4710]: E1009 09:55:18.815386 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:55:32 crc kubenswrapper[4710]: I1009 09:55:32.814841 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:55:32 crc kubenswrapper[4710]: E1009 09:55:32.815468 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:55:47 crc kubenswrapper[4710]: I1009 09:55:47.815218 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:55:47 crc kubenswrapper[4710]: E1009 09:55:47.815855 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:56:02 crc kubenswrapper[4710]: I1009 09:56:02.815532 4710 
scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:56:02 crc kubenswrapper[4710]: E1009 09:56:02.816339 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:56:15 crc kubenswrapper[4710]: I1009 09:56:15.814407 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:56:15 crc kubenswrapper[4710]: E1009 09:56:15.815014 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:56:30 crc kubenswrapper[4710]: I1009 09:56:30.815225 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:56:30 crc kubenswrapper[4710]: E1009 09:56:30.815925 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:56:44 crc kubenswrapper[4710]: I1009 09:56:44.814611 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:56:44 crc kubenswrapper[4710]: E1009 09:56:44.815266 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:56:55 crc kubenswrapper[4710]: I1009 09:56:55.815028 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:56:55 crc kubenswrapper[4710]: E1009 09:56:55.815637 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:57:07 crc kubenswrapper[4710]: I1009 09:57:07.815507 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:57:07 crc kubenswrapper[4710]: E1009 09:57:07.816194 4710 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:57:22 crc kubenswrapper[4710]: I1009 09:57:22.814998 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:57:22 crc kubenswrapper[4710]: E1009 09:57:22.816401 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:57:37 crc kubenswrapper[4710]: I1009 09:57:37.814647 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:57:37 crc kubenswrapper[4710]: E1009 09:57:37.815791 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:57:48 crc kubenswrapper[4710]: I1009 09:57:48.815315 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:57:48 crc kubenswrapper[4710]: E1009 09:57:48.816307 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:58:01 crc kubenswrapper[4710]: I1009 09:58:01.816393 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:58:01 crc kubenswrapper[4710]: E1009 09:58:01.817151 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 09:58:15 crc kubenswrapper[4710]: I1009 09:58:15.815505 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 09:58:16 crc kubenswrapper[4710]: I1009 09:58:16.512004 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" 
event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2"} Oct 09 09:58:16 crc kubenswrapper[4710]: I1009 09:58:16.529538 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=236.525257053 podStartE2EDuration="4m48.52951835s" podCreationTimestamp="2025-10-09 09:53:28 +0000 UTC" firstStartedPulling="2025-10-09 09:53:30.42656356 +0000 UTC m=+2933.916671958" lastFinishedPulling="2025-10-09 09:54:22.430824857 +0000 UTC m=+2985.920933255" observedRunningTime="2025-10-09 09:54:23.846928169 +0000 UTC m=+2987.337036566" watchObservedRunningTime="2025-10-09 09:58:16.52951835 +0000 UTC m=+3220.019626747" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.794881 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.798258 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.809327 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.818531 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvbvt\" (UniqueName: \"kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.818584 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.818724 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.921453 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvbvt\" (UniqueName: \"kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.921627 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.921819 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.922097 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.922157 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:47 crc kubenswrapper[4710]: I1009 09:58:47.948250 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvbvt\" (UniqueName: \"kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt\") pod \"redhat-marketplace-l64js\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.126070 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.563519 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.768419 4710 generic.go:334] "Generic (PLEG): container finished" podID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerID="1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e" exitCode=0 Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.768584 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerDied","Data":"1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e"} Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.768669 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerStarted","Data":"4cc58148551d179d21128c451fdbd39cc5232d7c29e6c78216f1fc71fa074e4d"} Oct 09 09:58:48 crc kubenswrapper[4710]: I1009 09:58:48.770324 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 09:58:49 crc kubenswrapper[4710]: I1009 09:58:49.784602 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerStarted","Data":"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f"} Oct 09 09:58:50 crc kubenswrapper[4710]: I1009 09:58:50.797702 4710 generic.go:334] "Generic (PLEG): container finished" podID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerID="b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f" exitCode=0 Oct 09 09:58:50 crc kubenswrapper[4710]: I1009 09:58:50.798062 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerDied","Data":"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f"} Oct 09 09:58:51 crc kubenswrapper[4710]: I1009 09:58:51.807498 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerStarted","Data":"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221"} Oct 09 09:58:51 crc kubenswrapper[4710]: I1009 09:58:51.826717 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-l64js" podStartSLOduration=2.138624311 podStartE2EDuration="4.82670175s" podCreationTimestamp="2025-10-09 09:58:47 +0000 UTC" firstStartedPulling="2025-10-09 09:58:48.77011496 +0000 UTC m=+3252.260223356" lastFinishedPulling="2025-10-09 09:58:51.458192397 +0000 UTC m=+3254.948300795" observedRunningTime="2025-10-09 09:58:51.821651787 +0000 UTC m=+3255.311760184" watchObservedRunningTime="2025-10-09 09:58:51.82670175 +0000 UTC m=+3255.316810147" Oct 09 09:58:58 crc kubenswrapper[4710]: I1009 09:58:58.126391 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:58 crc kubenswrapper[4710]: I1009 09:58:58.128795 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:58 crc kubenswrapper[4710]: I1009 09:58:58.174119 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:58 crc kubenswrapper[4710]: I1009 09:58:58.894581 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:58:58 crc kubenswrapper[4710]: I1009 09:58:58.935509 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:59:00 crc kubenswrapper[4710]: I1009 09:59:00.877105 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-l64js" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="registry-server" containerID="cri-o://0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221" gracePeriod=2 Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.431958 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.490579 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content\") pod \"8fe184a2-dfb0-411c-b9cc-b257ca358031\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.491007 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvbvt\" (UniqueName: \"kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt\") pod \"8fe184a2-dfb0-411c-b9cc-b257ca358031\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.491416 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities\") pod \"8fe184a2-dfb0-411c-b9cc-b257ca358031\" (UID: \"8fe184a2-dfb0-411c-b9cc-b257ca358031\") " Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.491939 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities" (OuterVolumeSpecName: "utilities") pod "8fe184a2-dfb0-411c-b9cc-b257ca358031" (UID: "8fe184a2-dfb0-411c-b9cc-b257ca358031"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.492401 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.497558 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt" (OuterVolumeSpecName: "kube-api-access-qvbvt") pod "8fe184a2-dfb0-411c-b9cc-b257ca358031" (UID: "8fe184a2-dfb0-411c-b9cc-b257ca358031"). InnerVolumeSpecName "kube-api-access-qvbvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.502448 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8fe184a2-dfb0-411c-b9cc-b257ca358031" (UID: "8fe184a2-dfb0-411c-b9cc-b257ca358031"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.596112 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8fe184a2-dfb0-411c-b9cc-b257ca358031-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.596719 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvbvt\" (UniqueName: \"kubernetes.io/projected/8fe184a2-dfb0-411c-b9cc-b257ca358031-kube-api-access-qvbvt\") on node \"crc\" DevicePath \"\"" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.890275 4710 generic.go:334] "Generic (PLEG): container finished" podID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerID="0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221" exitCode=0 Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.890329 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerDied","Data":"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221"} Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.890365 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-l64js" event={"ID":"8fe184a2-dfb0-411c-b9cc-b257ca358031","Type":"ContainerDied","Data":"4cc58148551d179d21128c451fdbd39cc5232d7c29e6c78216f1fc71fa074e4d"} Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.890395 4710 scope.go:117] "RemoveContainer" containerID="0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.890417 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-l64js" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.917008 4710 scope.go:117] "RemoveContainer" containerID="b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.926468 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.931354 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-l64js"] Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.945129 4710 scope.go:117] "RemoveContainer" containerID="1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.981308 4710 scope.go:117] "RemoveContainer" containerID="0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221" Oct 09 09:59:01 crc kubenswrapper[4710]: E1009 09:59:01.981745 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221\": container with ID starting with 0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221 not found: ID does not exist" containerID="0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.981797 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221"} err="failed to get container status \"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221\": rpc error: code = NotFound desc = could not find container \"0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221\": container with ID starting with 0d92bb3906cd200cf79e705c1c25c94dfe55b649cb4aaf497d7681a5c2874221 not found: ID does not exist" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.981817 4710 scope.go:117] "RemoveContainer" containerID="b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f" Oct 09 09:59:01 crc kubenswrapper[4710]: E1009 09:59:01.982173 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f\": container with ID starting with b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f not found: ID does not exist" containerID="b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.982214 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f"} err="failed to get container status \"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f\": rpc error: code = NotFound desc = could not find container \"b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f\": container with ID starting with b8e56e949265d3f601c39d57ac36ac15fe67cc1e3e4f05a2166dcead11b6854f not found: ID does not exist" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.982233 4710 scope.go:117] "RemoveContainer" containerID="1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e" Oct 09 09:59:01 crc kubenswrapper[4710]: E1009 09:59:01.982677 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e\": container with ID starting with 1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e not found: ID does not exist" containerID="1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e" Oct 09 09:59:01 crc kubenswrapper[4710]: I1009 09:59:01.982721 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e"} err="failed to get container status \"1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e\": rpc error: code = NotFound desc = could not find container \"1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e\": container with ID starting with 1ac321b8c5139354b1bf3e31b4511cc0a46a31166f51a522c133a3e925f4a26e not found: ID does not exist" Oct 09 09:59:02 crc kubenswrapper[4710]: I1009 09:59:02.827399 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" path="/var/lib/kubelet/pods/8fe184a2-dfb0-411c-b9cc-b257ca358031/volumes" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.162453 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7"] Oct 09 10:00:00 crc kubenswrapper[4710]: E1009 10:00:00.163462 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="extract-utilities" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.163475 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="extract-utilities" Oct 09 10:00:00 crc kubenswrapper[4710]: E1009 10:00:00.163495 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="extract-content" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.163501 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="extract-content" Oct 09 10:00:00 crc kubenswrapper[4710]: E1009 10:00:00.163527 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="registry-server" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.163533 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="registry-server" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.163732 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fe184a2-dfb0-411c-b9cc-b257ca358031" containerName="registry-server" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.164323 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.166355 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.167750 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.169952 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7"] Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.195379 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck6r2\" (UniqueName: \"kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.195601 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.195767 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.297488 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.297553 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.297671 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck6r2\" (UniqueName: \"kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.298720 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume\") pod 
\"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.303967 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.313705 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck6r2\" (UniqueName: \"kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2\") pod \"collect-profiles-29333400-n8fl7\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.483769 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:00 crc kubenswrapper[4710]: I1009 10:00:00.880376 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7"] Oct 09 10:00:00 crc kubenswrapper[4710]: W1009 10:00:00.890707 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fe6fb27_bf62_427e_b1f4_426e55c73e0e.slice/crio-4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8 WatchSource:0}: Error finding container 4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8: Status 404 returned error can't find the container with id 4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8 Oct 09 10:00:01 crc kubenswrapper[4710]: I1009 10:00:01.296878 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" event={"ID":"9fe6fb27-bf62-427e-b1f4-426e55c73e0e","Type":"ContainerStarted","Data":"275b90a81a871003cca44b35e1ace70f1b1c5176a04612fc49b54145c9156733"} Oct 09 10:00:01 crc kubenswrapper[4710]: I1009 10:00:01.297129 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" event={"ID":"9fe6fb27-bf62-427e-b1f4-426e55c73e0e","Type":"ContainerStarted","Data":"4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8"} Oct 09 10:00:01 crc kubenswrapper[4710]: I1009 10:00:01.318646 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" podStartSLOduration=1.318630131 podStartE2EDuration="1.318630131s" podCreationTimestamp="2025-10-09 10:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:00:01.311901372 +0000 UTC m=+3324.802009769" watchObservedRunningTime="2025-10-09 10:00:01.318630131 +0000 UTC m=+3324.808738529" Oct 09 10:00:02 crc kubenswrapper[4710]: I1009 10:00:02.305182 4710 generic.go:334] "Generic (PLEG): container finished" podID="9fe6fb27-bf62-427e-b1f4-426e55c73e0e" containerID="275b90a81a871003cca44b35e1ace70f1b1c5176a04612fc49b54145c9156733" exitCode=0 Oct 09 10:00:02 crc kubenswrapper[4710]: I1009 10:00:02.305242 
4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" event={"ID":"9fe6fb27-bf62-427e-b1f4-426e55c73e0e","Type":"ContainerDied","Data":"275b90a81a871003cca44b35e1ace70f1b1c5176a04612fc49b54145c9156733"} Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.715396 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.852923 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck6r2\" (UniqueName: \"kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2\") pod \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.853004 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume\") pod \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.853031 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume\") pod \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\" (UID: \"9fe6fb27-bf62-427e-b1f4-426e55c73e0e\") " Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.853834 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume" (OuterVolumeSpecName: "config-volume") pod "9fe6fb27-bf62-427e-b1f4-426e55c73e0e" (UID: "9fe6fb27-bf62-427e-b1f4-426e55c73e0e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.857772 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2" (OuterVolumeSpecName: "kube-api-access-ck6r2") pod "9fe6fb27-bf62-427e-b1f4-426e55c73e0e" (UID: "9fe6fb27-bf62-427e-b1f4-426e55c73e0e"). InnerVolumeSpecName "kube-api-access-ck6r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.857868 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9fe6fb27-bf62-427e-b1f4-426e55c73e0e" (UID: "9fe6fb27-bf62-427e-b1f4-426e55c73e0e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.955657 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-config-volume\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.955682 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:03 crc kubenswrapper[4710]: I1009 10:00:03.955693 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck6r2\" (UniqueName: \"kubernetes.io/projected/9fe6fb27-bf62-427e-b1f4-426e55c73e0e-kube-api-access-ck6r2\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.322947 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" event={"ID":"9fe6fb27-bf62-427e-b1f4-426e55c73e0e","Type":"ContainerDied","Data":"4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8"} Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.322986 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c5ff1d964d2d4e98a3313c9fd8a9f86283535a05cd826052eda1f26e055a7d8" Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.323035 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333400-n8fl7" Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.776040 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v"] Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.781527 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333355-zg47v"] Oct 09 10:00:04 crc kubenswrapper[4710]: I1009 10:00:04.824083 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94fa45a8-fffa-4fb1-bb72-8a21c2825e96" path="/var/lib/kubelet/pods/94fa45a8-fffa-4fb1-bb72-8a21c2825e96/volumes" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.712882 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:27 crc kubenswrapper[4710]: E1009 10:00:27.713693 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fe6fb27-bf62-427e-b1f4-426e55c73e0e" containerName="collect-profiles" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.713704 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fe6fb27-bf62-427e-b1f4-426e55c73e0e" containerName="collect-profiles" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.713884 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fe6fb27-bf62-427e-b1f4-426e55c73e0e" containerName="collect-profiles" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.715104 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.721692 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.842350 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4xts\" (UniqueName: \"kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.842412 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.842455 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.943649 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4xts\" (UniqueName: \"kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.943915 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.944004 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.944414 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.944499 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:27 crc kubenswrapper[4710]: I1009 10:00:27.967277 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f4xts\" (UniqueName: \"kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts\") pod \"community-operators-47wjg\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:28 crc kubenswrapper[4710]: I1009 10:00:28.032838 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:28 crc kubenswrapper[4710]: I1009 10:00:28.535736 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:29 crc kubenswrapper[4710]: I1009 10:00:29.482598 4710 generic.go:334] "Generic (PLEG): container finished" podID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerID="9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863" exitCode=0 Oct 09 10:00:29 crc kubenswrapper[4710]: I1009 10:00:29.482639 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerDied","Data":"9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863"} Oct 09 10:00:29 crc kubenswrapper[4710]: I1009 10:00:29.482663 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerStarted","Data":"5293d9b9a229d951f92520ec1b39c4a91eed452665e80d1fd4c54721751fc26c"} Oct 09 10:00:30 crc kubenswrapper[4710]: I1009 10:00:30.492304 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerStarted","Data":"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb"} Oct 09 10:00:31 crc kubenswrapper[4710]: I1009 10:00:31.500443 4710 generic.go:334] "Generic (PLEG): container finished" podID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerID="98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb" exitCode=0 Oct 09 10:00:31 crc kubenswrapper[4710]: I1009 10:00:31.500529 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerDied","Data":"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb"} Oct 09 10:00:32 crc kubenswrapper[4710]: I1009 10:00:32.508519 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerStarted","Data":"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c"} Oct 09 10:00:32 crc kubenswrapper[4710]: I1009 10:00:32.525327 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-47wjg" podStartSLOduration=2.956771755 podStartE2EDuration="5.525310631s" podCreationTimestamp="2025-10-09 10:00:27 +0000 UTC" firstStartedPulling="2025-10-09 10:00:29.484502948 +0000 UTC m=+3352.974611344" lastFinishedPulling="2025-10-09 10:00:32.053041823 +0000 UTC m=+3355.543150220" observedRunningTime="2025-10-09 10:00:32.519653664 +0000 UTC m=+3356.009762062" watchObservedRunningTime="2025-10-09 10:00:32.525310631 +0000 UTC m=+3356.015419029" Oct 09 10:00:35 crc kubenswrapper[4710]: I1009 10:00:35.545768 4710 patch_prober.go:28] interesting 
pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:00:35 crc kubenswrapper[4710]: I1009 10:00:35.546232 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:00:38 crc kubenswrapper[4710]: I1009 10:00:38.033244 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:38 crc kubenswrapper[4710]: I1009 10:00:38.033649 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:38 crc kubenswrapper[4710]: I1009 10:00:38.066914 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:38 crc kubenswrapper[4710]: I1009 10:00:38.579908 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:38 crc kubenswrapper[4710]: I1009 10:00:38.616724 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:40 crc kubenswrapper[4710]: I1009 10:00:40.559006 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-47wjg" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="registry-server" containerID="cri-o://16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c" gracePeriod=2 Oct 09 10:00:40 crc kubenswrapper[4710]: I1009 10:00:40.977011 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.100838 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content\") pod \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.101269 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4xts\" (UniqueName: \"kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts\") pod \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.101347 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities\") pod \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\" (UID: \"9a1da609-cb98-4d70-b39e-9ad9d7e7710f\") " Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.102016 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities" (OuterVolumeSpecName: "utilities") pod "9a1da609-cb98-4d70-b39e-9ad9d7e7710f" (UID: "9a1da609-cb98-4d70-b39e-9ad9d7e7710f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.106084 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts" (OuterVolumeSpecName: "kube-api-access-f4xts") pod "9a1da609-cb98-4d70-b39e-9ad9d7e7710f" (UID: "9a1da609-cb98-4d70-b39e-9ad9d7e7710f"). InnerVolumeSpecName "kube-api-access-f4xts". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.141685 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a1da609-cb98-4d70-b39e-9ad9d7e7710f" (UID: "9a1da609-cb98-4d70-b39e-9ad9d7e7710f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.203250 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.203281 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.203293 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4xts\" (UniqueName: \"kubernetes.io/projected/9a1da609-cb98-4d70-b39e-9ad9d7e7710f-kube-api-access-f4xts\") on node \"crc\" DevicePath \"\"" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.568801 4710 generic.go:334] "Generic (PLEG): container finished" podID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerID="16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c" exitCode=0 Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.568871 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-47wjg" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.569531 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerDied","Data":"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c"} Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.569578 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-47wjg" event={"ID":"9a1da609-cb98-4d70-b39e-9ad9d7e7710f","Type":"ContainerDied","Data":"5293d9b9a229d951f92520ec1b39c4a91eed452665e80d1fd4c54721751fc26c"} Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.569598 4710 scope.go:117] "RemoveContainer" containerID="16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.585338 4710 scope.go:117] "RemoveContainer" containerID="98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.592956 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.601953 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-47wjg"] Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.614012 4710 scope.go:117] "RemoveContainer" containerID="9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.638067 4710 scope.go:117] "RemoveContainer" containerID="16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c" Oct 09 10:00:41 crc kubenswrapper[4710]: E1009 10:00:41.638452 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c\": container with ID starting with 16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c not found: ID does not exist" containerID="16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.638480 
4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c"} err="failed to get container status \"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c\": rpc error: code = NotFound desc = could not find container \"16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c\": container with ID starting with 16c2dbc87989f3e88a8b4c9f90a01396761a9f587a8d124211b91832773e219c not found: ID does not exist" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.638500 4710 scope.go:117] "RemoveContainer" containerID="98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb" Oct 09 10:00:41 crc kubenswrapper[4710]: E1009 10:00:41.638731 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb\": container with ID starting with 98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb not found: ID does not exist" containerID="98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.638752 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb"} err="failed to get container status \"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb\": rpc error: code = NotFound desc = could not find container \"98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb\": container with ID starting with 98cd5f67956cc37ec9d7facfdf224f93a2db8c43860409135036603b587fb4cb not found: ID does not exist" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.638766 4710 scope.go:117] "RemoveContainer" containerID="9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863" Oct 09 10:00:41 crc kubenswrapper[4710]: E1009 10:00:41.638999 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863\": container with ID starting with 9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863 not found: ID does not exist" containerID="9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863" Oct 09 10:00:41 crc kubenswrapper[4710]: I1009 10:00:41.639046 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863"} err="failed to get container status \"9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863\": rpc error: code = NotFound desc = could not find container \"9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863\": container with ID starting with 9637c6f53bec25b81428d95da732eaebf948ace03cc48f50b969a83a85d0b863 not found: ID does not exist" Oct 09 10:00:42 crc kubenswrapper[4710]: I1009 10:00:42.823111 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" path="/var/lib/kubelet/pods/9a1da609-cb98-4d70-b39e-9ad9d7e7710f/volumes" Oct 09 10:00:52 crc kubenswrapper[4710]: I1009 10:00:52.502507 4710 scope.go:117] "RemoveContainer" containerID="f201b183d7a14b739cab32485256fd2b368745fdd119ff9070fed1eb60a3374b" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.135664 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/keystone-cron-29333401-56psm"] Oct 09 10:01:00 crc kubenswrapper[4710]: E1009 10:01:00.136516 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="extract-utilities" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.136528 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="extract-utilities" Oct 09 10:01:00 crc kubenswrapper[4710]: E1009 10:01:00.136556 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="extract-content" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.136561 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="extract-content" Oct 09 10:01:00 crc kubenswrapper[4710]: E1009 10:01:00.136570 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="registry-server" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.136575 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="registry-server" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.136724 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a1da609-cb98-4d70-b39e-9ad9d7e7710f" containerName="registry-server" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.137336 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.154749 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333401-56psm"] Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.219383 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.219684 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.219710 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.219728 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7km6d\" (UniqueName: \"kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.321221 4710 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.321296 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.321319 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.321338 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7km6d\" (UniqueName: \"kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.327028 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.327573 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.335456 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.353001 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7km6d\" (UniqueName: \"kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d\") pod \"keystone-cron-29333401-56psm\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.453182 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:00 crc kubenswrapper[4710]: I1009 10:01:00.859813 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29333401-56psm"] Oct 09 10:01:01 crc kubenswrapper[4710]: I1009 10:01:01.700154 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333401-56psm" event={"ID":"208c883f-3974-4994-802b-6c3bcc416326","Type":"ContainerStarted","Data":"92d033f313b09d52aa3e62b5bbb3a121edd97277dedd0b96c1e78fc83e386114"} Oct 09 10:01:01 crc kubenswrapper[4710]: I1009 10:01:01.700555 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333401-56psm" event={"ID":"208c883f-3974-4994-802b-6c3bcc416326","Type":"ContainerStarted","Data":"a08e11e46a3b75ce64a3937bb91ad66a98b1d487f36e6ca85e42356b77b55460"} Oct 09 10:01:01 crc kubenswrapper[4710]: I1009 10:01:01.718211 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29333401-56psm" podStartSLOduration=1.718199671 podStartE2EDuration="1.718199671s" podCreationTimestamp="2025-10-09 10:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:01:01.716732594 +0000 UTC m=+3385.206840991" watchObservedRunningTime="2025-10-09 10:01:01.718199671 +0000 UTC m=+3385.208308067" Oct 09 10:01:03 crc kubenswrapper[4710]: I1009 10:01:03.714630 4710 generic.go:334] "Generic (PLEG): container finished" podID="208c883f-3974-4994-802b-6c3bcc416326" containerID="92d033f313b09d52aa3e62b5bbb3a121edd97277dedd0b96c1e78fc83e386114" exitCode=0 Oct 09 10:01:03 crc kubenswrapper[4710]: I1009 10:01:03.714712 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333401-56psm" event={"ID":"208c883f-3974-4994-802b-6c3bcc416326","Type":"ContainerDied","Data":"92d033f313b09d52aa3e62b5bbb3a121edd97277dedd0b96c1e78fc83e386114"} Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.037484 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-hwcfw"] Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.043978 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-hwcfw"] Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.132251 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.212513 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7km6d\" (UniqueName: \"kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d\") pod \"208c883f-3974-4994-802b-6c3bcc416326\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.212634 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys\") pod \"208c883f-3974-4994-802b-6c3bcc416326\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.212683 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle\") pod \"208c883f-3974-4994-802b-6c3bcc416326\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.212708 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data\") pod \"208c883f-3974-4994-802b-6c3bcc416326\" (UID: \"208c883f-3974-4994-802b-6c3bcc416326\") " Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.221345 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "208c883f-3974-4994-802b-6c3bcc416326" (UID: "208c883f-3974-4994-802b-6c3bcc416326"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.226728 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d" (OuterVolumeSpecName: "kube-api-access-7km6d") pod "208c883f-3974-4994-802b-6c3bcc416326" (UID: "208c883f-3974-4994-802b-6c3bcc416326"). InnerVolumeSpecName "kube-api-access-7km6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.235298 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "208c883f-3974-4994-802b-6c3bcc416326" (UID: "208c883f-3974-4994-802b-6c3bcc416326"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.252108 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data" (OuterVolumeSpecName: "config-data") pod "208c883f-3974-4994-802b-6c3bcc416326" (UID: "208c883f-3974-4994-802b-6c3bcc416326"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.314592 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7km6d\" (UniqueName: \"kubernetes.io/projected/208c883f-3974-4994-802b-6c3bcc416326-kube-api-access-7km6d\") on node \"crc\" DevicePath \"\"" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.314724 4710 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.314786 4710 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.314861 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/208c883f-3974-4994-802b-6c3bcc416326-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.545808 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.546059 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.729654 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29333401-56psm" event={"ID":"208c883f-3974-4994-802b-6c3bcc416326","Type":"ContainerDied","Data":"a08e11e46a3b75ce64a3937bb91ad66a98b1d487f36e6ca85e42356b77b55460"} Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.729691 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a08e11e46a3b75ce64a3937bb91ad66a98b1d487f36e6ca85e42356b77b55460" Oct 09 10:01:05 crc kubenswrapper[4710]: I1009 10:01:05.729743 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29333401-56psm" Oct 09 10:01:06 crc kubenswrapper[4710]: I1009 10:01:06.823135 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5e73400-7814-42e4-879a-4193792ce918" path="/var/lib/kubelet/pods/b5e73400-7814-42e4-879a-4193792ce918/volumes" Oct 09 10:01:18 crc kubenswrapper[4710]: I1009 10:01:18.020654 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-aa4d-account-create-xzmtz"] Oct 09 10:01:18 crc kubenswrapper[4710]: I1009 10:01:18.026599 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-aa4d-account-create-xzmtz"] Oct 09 10:01:18 crc kubenswrapper[4710]: I1009 10:01:18.821378 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c" path="/var/lib/kubelet/pods/9aa93c2c-c428-4ae9-b34a-2b9203dcdb8c/volumes" Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.545999 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.546493 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.546535 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.546953 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.547002 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2" gracePeriod=600 Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.931559 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2" exitCode=0 Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.931761 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2"} Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.931786 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" 
event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d"} Oct 09 10:01:35 crc kubenswrapper[4710]: I1009 10:01:35.931801 4710 scope.go:117] "RemoveContainer" containerID="2ba3993b6f3bb5af3612d4ef56e2eb56adfb7fb9268048827e8ae032dbefea51" Oct 09 10:01:37 crc kubenswrapper[4710]: I1009 10:01:37.035052 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-qdvtf"] Oct 09 10:01:37 crc kubenswrapper[4710]: I1009 10:01:37.041155 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-qdvtf"] Oct 09 10:01:38 crc kubenswrapper[4710]: I1009 10:01:38.822575 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cdc5804-ffca-4474-a386-1f803b4a1c23" path="/var/lib/kubelet/pods/4cdc5804-ffca-4474-a386-1f803b4a1c23/volumes" Oct 09 10:01:52 crc kubenswrapper[4710]: I1009 10:01:52.571968 4710 scope.go:117] "RemoveContainer" containerID="e8643c6d2ce0c98257dcfc4d9d3c532380f31b11d903756876aca4082f885ecd" Oct 09 10:01:52 crc kubenswrapper[4710]: I1009 10:01:52.590488 4710 scope.go:117] "RemoveContainer" containerID="559f6b680076735a0530b5807453e2c706113d1d831e465ae6fe57c8b08bedfe" Oct 09 10:01:52 crc kubenswrapper[4710]: I1009 10:01:52.635514 4710 scope.go:117] "RemoveContainer" containerID="5cdee0ea65674be2a87b16dbbd851745808c152f848a4f445fd99095cf5a10a1" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.467053 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:01:58 crc kubenswrapper[4710]: E1009 10:01:58.467969 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="208c883f-3974-4994-802b-6c3bcc416326" containerName="keystone-cron" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.467981 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="208c883f-3974-4994-802b-6c3bcc416326" containerName="keystone-cron" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.468146 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="208c883f-3974-4994-802b-6c3bcc416326" containerName="keystone-cron" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.469394 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.473166 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.514415 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.514634 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5ml6\" (UniqueName: \"kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.514744 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.615979 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.616552 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.616685 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.616825 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5ml6\" (UniqueName: \"kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.616944 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.633306 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-t5ml6\" (UniqueName: \"kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6\") pod \"redhat-operators-82lrf\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:58 crc kubenswrapper[4710]: I1009 10:01:58.793848 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:01:59 crc kubenswrapper[4710]: I1009 10:01:59.216362 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:01:59 crc kubenswrapper[4710]: E1009 10:01:59.517537 4710 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9846f4ea_1150_4743_b30e_a59c8ab8233c.slice/crio-conmon-ba814c2bef501a7e9eda8fb938cedb92cbbc67e9e11523221b4433fe377a7385.scope\": RecentStats: unable to find data in memory cache]" Oct 09 10:02:00 crc kubenswrapper[4710]: I1009 10:02:00.095033 4710 generic.go:334] "Generic (PLEG): container finished" podID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerID="ba814c2bef501a7e9eda8fb938cedb92cbbc67e9e11523221b4433fe377a7385" exitCode=0 Oct 09 10:02:00 crc kubenswrapper[4710]: I1009 10:02:00.095336 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerDied","Data":"ba814c2bef501a7e9eda8fb938cedb92cbbc67e9e11523221b4433fe377a7385"} Oct 09 10:02:00 crc kubenswrapper[4710]: I1009 10:02:00.095417 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerStarted","Data":"20ecaf55d224e2cef34d90f40c3c0913a3726cc40dbb13ee17c4b8b2e7dee651"} Oct 09 10:02:01 crc kubenswrapper[4710]: I1009 10:02:01.102966 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerStarted","Data":"6ac428e46644bcac8cc98dc86aa0d8b3974c0823924c03c1c3de049e7a8e3eca"} Oct 09 10:02:03 crc kubenswrapper[4710]: I1009 10:02:03.116116 4710 generic.go:334] "Generic (PLEG): container finished" podID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerID="6ac428e46644bcac8cc98dc86aa0d8b3974c0823924c03c1c3de049e7a8e3eca" exitCode=0 Oct 09 10:02:03 crc kubenswrapper[4710]: I1009 10:02:03.116155 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerDied","Data":"6ac428e46644bcac8cc98dc86aa0d8b3974c0823924c03c1c3de049e7a8e3eca"} Oct 09 10:02:04 crc kubenswrapper[4710]: I1009 10:02:04.128032 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerStarted","Data":"04053f9e17f243ef713813c4fa82cc57c838097052df8a8069a7211822b27b1b"} Oct 09 10:02:04 crc kubenswrapper[4710]: I1009 10:02:04.141957 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-82lrf" podStartSLOduration=2.387456956 podStartE2EDuration="6.141942797s" podCreationTimestamp="2025-10-09 10:01:58 +0000 UTC" firstStartedPulling="2025-10-09 10:02:00.097631861 +0000 UTC m=+3443.587740258" 
lastFinishedPulling="2025-10-09 10:02:03.852117702 +0000 UTC m=+3447.342226099" observedRunningTime="2025-10-09 10:02:04.140483505 +0000 UTC m=+3447.630591903" watchObservedRunningTime="2025-10-09 10:02:04.141942797 +0000 UTC m=+3447.632051193" Oct 09 10:02:08 crc kubenswrapper[4710]: I1009 10:02:08.794478 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:08 crc kubenswrapper[4710]: I1009 10:02:08.794856 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:09 crc kubenswrapper[4710]: I1009 10:02:09.829463 4710 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-82lrf" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="registry-server" probeResult="failure" output=< Oct 09 10:02:09 crc kubenswrapper[4710]: timeout: failed to connect service ":50051" within 1s Oct 09 10:02:09 crc kubenswrapper[4710]: > Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.150371 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.152452 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.159236 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.304535 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.304633 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.304748 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fkdr\" (UniqueName: \"kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.406166 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.406298 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fkdr\" (UniqueName: \"kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr\") pod \"certified-operators-mpkvh\" (UID: 
\"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.406368 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.406713 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.406753 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.425079 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fkdr\" (UniqueName: \"kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr\") pod \"certified-operators-mpkvh\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:16 crc kubenswrapper[4710]: I1009 10:02:16.466332 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:17 crc kubenswrapper[4710]: I1009 10:02:17.041487 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:17 crc kubenswrapper[4710]: I1009 10:02:17.218185 4710 generic.go:334] "Generic (PLEG): container finished" podID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerID="2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247" exitCode=0 Oct 09 10:02:17 crc kubenswrapper[4710]: I1009 10:02:17.218225 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerDied","Data":"2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247"} Oct 09 10:02:17 crc kubenswrapper[4710]: I1009 10:02:17.218250 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerStarted","Data":"2d6e1fbdfc156cbe7bf38d689c91de8d156e7245ef7acd11e0ab60cbe467efc3"} Oct 09 10:02:18 crc kubenswrapper[4710]: I1009 10:02:18.231504 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerStarted","Data":"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e"} Oct 09 10:02:18 crc kubenswrapper[4710]: I1009 10:02:18.833116 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:18 crc kubenswrapper[4710]: I1009 10:02:18.871686 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:19 crc kubenswrapper[4710]: I1009 10:02:19.240931 4710 generic.go:334] "Generic (PLEG): container finished" podID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerID="bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e" exitCode=0 Oct 09 10:02:19 crc kubenswrapper[4710]: I1009 10:02:19.241030 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerDied","Data":"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e"} Oct 09 10:02:20 crc kubenswrapper[4710]: I1009 10:02:20.249982 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerStarted","Data":"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f"} Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.130721 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mpkvh" podStartSLOduration=2.598375878 podStartE2EDuration="5.130705971s" podCreationTimestamp="2025-10-09 10:02:16 +0000 UTC" firstStartedPulling="2025-10-09 10:02:17.219573821 +0000 UTC m=+3460.709682218" lastFinishedPulling="2025-10-09 10:02:19.751903914 +0000 UTC m=+3463.242012311" observedRunningTime="2025-10-09 10:02:20.264678546 +0000 UTC m=+3463.754786944" watchObservedRunningTime="2025-10-09 10:02:21.130705971 +0000 UTC m=+3464.620814358" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.133569 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.133766 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-82lrf" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="registry-server" containerID="cri-o://04053f9e17f243ef713813c4fa82cc57c838097052df8a8069a7211822b27b1b" gracePeriod=2 Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.260119 4710 generic.go:334] "Generic (PLEG): container finished" podID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerID="04053f9e17f243ef713813c4fa82cc57c838097052df8a8069a7211822b27b1b" exitCode=0 Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.260152 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerDied","Data":"04053f9e17f243ef713813c4fa82cc57c838097052df8a8069a7211822b27b1b"} Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.676352 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.707346 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5ml6\" (UniqueName: \"kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6\") pod \"9846f4ea-1150-4743-b30e-a59c8ab8233c\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.707394 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities\") pod \"9846f4ea-1150-4743-b30e-a59c8ab8233c\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.707516 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content\") pod \"9846f4ea-1150-4743-b30e-a59c8ab8233c\" (UID: \"9846f4ea-1150-4743-b30e-a59c8ab8233c\") " Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.711226 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities" (OuterVolumeSpecName: "utilities") pod "9846f4ea-1150-4743-b30e-a59c8ab8233c" (UID: "9846f4ea-1150-4743-b30e-a59c8ab8233c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.715976 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6" (OuterVolumeSpecName: "kube-api-access-t5ml6") pod "9846f4ea-1150-4743-b30e-a59c8ab8233c" (UID: "9846f4ea-1150-4743-b30e-a59c8ab8233c"). InnerVolumeSpecName "kube-api-access-t5ml6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.773550 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9846f4ea-1150-4743-b30e-a59c8ab8233c" (UID: "9846f4ea-1150-4743-b30e-a59c8ab8233c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.809918 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.809948 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5ml6\" (UniqueName: \"kubernetes.io/projected/9846f4ea-1150-4743-b30e-a59c8ab8233c-kube-api-access-t5ml6\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:21 crc kubenswrapper[4710]: I1009 10:02:21.809959 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9846f4ea-1150-4743-b30e-a59c8ab8233c-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.270032 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-82lrf" event={"ID":"9846f4ea-1150-4743-b30e-a59c8ab8233c","Type":"ContainerDied","Data":"20ecaf55d224e2cef34d90f40c3c0913a3726cc40dbb13ee17c4b8b2e7dee651"} Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.270079 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-82lrf" Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.270080 4710 scope.go:117] "RemoveContainer" containerID="04053f9e17f243ef713813c4fa82cc57c838097052df8a8069a7211822b27b1b" Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.295624 4710 scope.go:117] "RemoveContainer" containerID="6ac428e46644bcac8cc98dc86aa0d8b3974c0823924c03c1c3de049e7a8e3eca" Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.297360 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.303591 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-82lrf"] Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.313002 4710 scope.go:117] "RemoveContainer" containerID="ba814c2bef501a7e9eda8fb938cedb92cbbc67e9e11523221b4433fe377a7385" Oct 09 10:02:22 crc kubenswrapper[4710]: I1009 10:02:22.823382 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" path="/var/lib/kubelet/pods/9846f4ea-1150-4743-b30e-a59c8ab8233c/volumes" Oct 09 10:02:26 crc kubenswrapper[4710]: I1009 10:02:26.467351 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:26 crc kubenswrapper[4710]: I1009 10:02:26.467771 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:26 crc kubenswrapper[4710]: I1009 10:02:26.501572 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:27 crc kubenswrapper[4710]: I1009 10:02:27.342683 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:31 crc kubenswrapper[4710]: I1009 10:02:31.532647 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:31 crc kubenswrapper[4710]: I1009 10:02:31.533504 4710 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/certified-operators-mpkvh" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="registry-server" containerID="cri-o://d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f" gracePeriod=2 Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.048470 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.069592 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities\") pod \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.069632 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fkdr\" (UniqueName: \"kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr\") pod \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.069758 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content\") pod \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\" (UID: \"693b5dcb-940b-4669-a9bc-e4fd00e48e80\") " Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.070263 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities" (OuterVolumeSpecName: "utilities") pod "693b5dcb-940b-4669-a9bc-e4fd00e48e80" (UID: "693b5dcb-940b-4669-a9bc-e4fd00e48e80"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.075982 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr" (OuterVolumeSpecName: "kube-api-access-7fkdr") pod "693b5dcb-940b-4669-a9bc-e4fd00e48e80" (UID: "693b5dcb-940b-4669-a9bc-e4fd00e48e80"). InnerVolumeSpecName "kube-api-access-7fkdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.105622 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "693b5dcb-940b-4669-a9bc-e4fd00e48e80" (UID: "693b5dcb-940b-4669-a9bc-e4fd00e48e80"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.171328 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.171363 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fkdr\" (UniqueName: \"kubernetes.io/projected/693b5dcb-940b-4669-a9bc-e4fd00e48e80-kube-api-access-7fkdr\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.171373 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/693b5dcb-940b-4669-a9bc-e4fd00e48e80-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.344101 4710 generic.go:334] "Generic (PLEG): container finished" podID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerID="d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f" exitCode=0 Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.344467 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerDied","Data":"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f"} Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.344929 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mpkvh" event={"ID":"693b5dcb-940b-4669-a9bc-e4fd00e48e80","Type":"ContainerDied","Data":"2d6e1fbdfc156cbe7bf38d689c91de8d156e7245ef7acd11e0ab60cbe467efc3"} Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.344532 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mpkvh" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.345027 4710 scope.go:117] "RemoveContainer" containerID="d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.364504 4710 scope.go:117] "RemoveContainer" containerID="bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.376471 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.381393 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mpkvh"] Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.394847 4710 scope.go:117] "RemoveContainer" containerID="2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.423343 4710 scope.go:117] "RemoveContainer" containerID="d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f" Oct 09 10:02:32 crc kubenswrapper[4710]: E1009 10:02:32.423653 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f\": container with ID starting with d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f not found: ID does not exist" containerID="d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.423682 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f"} err="failed to get container status \"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f\": rpc error: code = NotFound desc = could not find container \"d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f\": container with ID starting with d50d833634e1f53bb93baef4446ce53f3756405230deea4296193503c1fc851f not found: ID does not exist" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.423703 4710 scope.go:117] "RemoveContainer" containerID="bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e" Oct 09 10:02:32 crc kubenswrapper[4710]: E1009 10:02:32.423982 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e\": container with ID starting with bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e not found: ID does not exist" containerID="bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.424003 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e"} err="failed to get container status \"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e\": rpc error: code = NotFound desc = could not find container \"bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e\": container with ID starting with bda7674b98888237152d6b9108e4a57fbc5f17761464d32e8ee25fecb3d5c55e not found: ID does not exist" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.424015 4710 scope.go:117] "RemoveContainer" 
containerID="2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247" Oct 09 10:02:32 crc kubenswrapper[4710]: E1009 10:02:32.424225 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247\": container with ID starting with 2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247 not found: ID does not exist" containerID="2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.424246 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247"} err="failed to get container status \"2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247\": rpc error: code = NotFound desc = could not find container \"2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247\": container with ID starting with 2d15621b0c576056e709321eef8d38c1a2134143f0d61408ca8438b537e1c247 not found: ID does not exist" Oct 09 10:02:32 crc kubenswrapper[4710]: I1009 10:02:32.824180 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" path="/var/lib/kubelet/pods/693b5dcb-940b-4669-a9bc-e4fd00e48e80/volumes" Oct 09 10:03:35 crc kubenswrapper[4710]: I1009 10:03:35.545850 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:03:35 crc kubenswrapper[4710]: I1009 10:03:35.546420 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:04:05 crc kubenswrapper[4710]: I1009 10:04:05.545916 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:04:05 crc kubenswrapper[4710]: I1009 10:04:05.546312 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:04:14 crc kubenswrapper[4710]: I1009 10:04:14.054459 4710 generic.go:334] "Generic (PLEG): container finished" podID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" containerID="fc2e0f94f651966a3671a5af7bd702dc5271ed6626611d82ffad607f222fa3c6" exitCode=0 Oct 09 10:04:14 crc kubenswrapper[4710]: I1009 10:04:14.054467 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"36f8a9d9-4342-4b4f-921b-a0acbe1215db","Type":"ContainerDied","Data":"fc2e0f94f651966a3671a5af7bd702dc5271ed6626611d82ffad607f222fa3c6"} Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.565970 4710 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721227 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721281 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721495 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721544 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721564 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721597 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721626 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721704 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl2qf\" (UniqueName: \"kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.721738 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key\") pod \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\" (UID: \"36f8a9d9-4342-4b4f-921b-a0acbe1215db\") " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.723465 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary" 
(OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.723564 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data" (OuterVolumeSpecName: "config-data") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.723956 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.740644 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.744066 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.745685 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.746202 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf" (OuterVolumeSpecName: "kube-api-access-cl2qf") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "kube-api-access-cl2qf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.752292 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.766424 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "36f8a9d9-4342-4b4f-921b-a0acbe1215db" (UID: "36f8a9d9-4342-4b4f-921b-a0acbe1215db"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.823335 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.823811 4710 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.823880 4710 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36f8a9d9-4342-4b4f-921b-a0acbe1215db-config-data\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.823930 4710 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.824084 4710 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.824380 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl2qf\" (UniqueName: \"kubernetes.io/projected/36f8a9d9-4342-4b4f-921b-a0acbe1215db-kube-api-access-cl2qf\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.824403 4710 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.824415 4710 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/36f8a9d9-4342-4b4f-921b-a0acbe1215db-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.824423 4710 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/36f8a9d9-4342-4b4f-921b-a0acbe1215db-ca-certs\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.838854 4710 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 09 10:04:15 crc kubenswrapper[4710]: I1009 10:04:15.925739 4710 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 09 10:04:16 crc kubenswrapper[4710]: I1009 10:04:16.070844 4710 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/tempest-tests-tempest" event={"ID":"36f8a9d9-4342-4b4f-921b-a0acbe1215db","Type":"ContainerDied","Data":"4ba79cc181887cef054997939a32875dc72c401dd49c7a5418a3c67af600a2e1"} Oct 09 10:04:16 crc kubenswrapper[4710]: I1009 10:04:16.070880 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ba79cc181887cef054997939a32875dc72c401dd49c7a5418a3c67af600a2e1" Oct 09 10:04:16 crc kubenswrapper[4710]: I1009 10:04:16.070886 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.266760 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.268363 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" containerName="tempest-tests-tempest-tests-runner" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.268462 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" containerName="tempest-tests-tempest-tests-runner" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.268538 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="extract-utilities" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.268585 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="extract-utilities" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.268636 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="extract-content" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.268738 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="extract-content" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.268793 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269050 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.269106 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="extract-utilities" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269147 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="extract-utilities" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.269194 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="extract-content" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269239 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="extract-content" Oct 09 10:04:27 crc kubenswrapper[4710]: E1009 10:04:27.269286 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269325 4710 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269514 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="9846f4ea-1150-4743-b30e-a59c8ab8233c" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269579 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="36f8a9d9-4342-4b4f-921b-a0acbe1215db" containerName="tempest-tests-tempest-tests-runner" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.269631 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="693b5dcb-940b-4669-a9bc-e4fd00e48e80" containerName="registry-server" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.270168 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.271965 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-b95j2" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.275089 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.398334 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.398506 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzr22\" (UniqueName: \"kubernetes.io/projected/b4ec062d-c82a-4a34-801c-4a290d15c32e-kube-api-access-vzr22\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.499608 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.499906 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzr22\" (UniqueName: \"kubernetes.io/projected/b4ec062d-c82a-4a34-801c-4a290d15c32e-kube-api-access-vzr22\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.499925 4710 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.525246 
4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzr22\" (UniqueName: \"kubernetes.io/projected/b4ec062d-c82a-4a34-801c-4a290d15c32e-kube-api-access-vzr22\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.528294 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b4ec062d-c82a-4a34-801c-4a290d15c32e\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.585896 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.940517 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Oct 09 10:04:27 crc kubenswrapper[4710]: I1009 10:04:27.947973 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 10:04:28 crc kubenswrapper[4710]: I1009 10:04:28.145789 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b4ec062d-c82a-4a34-801c-4a290d15c32e","Type":"ContainerStarted","Data":"0baff2f364d001b092f58625c9688d897ef06b84045b2b2545671aa988fa7c9e"} Oct 09 10:04:29 crc kubenswrapper[4710]: I1009 10:04:29.154194 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b4ec062d-c82a-4a34-801c-4a290d15c32e","Type":"ContainerStarted","Data":"7f8ce26297048641da1fd079daa5a287ec5166771d7ee215d4e89aa469fa4096"} Oct 09 10:04:29 crc kubenswrapper[4710]: I1009 10:04:29.168766 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.186441494 podStartE2EDuration="2.168753316s" podCreationTimestamp="2025-10-09 10:04:27 +0000 UTC" firstStartedPulling="2025-10-09 10:04:27.947770811 +0000 UTC m=+3591.437879207" lastFinishedPulling="2025-10-09 10:04:28.930082632 +0000 UTC m=+3592.420191029" observedRunningTime="2025-10-09 10:04:29.164809038 +0000 UTC m=+3592.654917435" watchObservedRunningTime="2025-10-09 10:04:29.168753316 +0000 UTC m=+3592.658861713" Oct 09 10:04:35 crc kubenswrapper[4710]: I1009 10:04:35.545765 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:04:35 crc kubenswrapper[4710]: I1009 10:04:35.546276 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:04:35 crc kubenswrapper[4710]: I1009 10:04:35.546318 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 10:04:35 crc kubenswrapper[4710]: I1009 10:04:35.547280 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 10:04:35 crc kubenswrapper[4710]: I1009 10:04:35.547343 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" gracePeriod=600 Oct 09 10:04:35 crc kubenswrapper[4710]: E1009 10:04:35.664648 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:04:36 crc kubenswrapper[4710]: I1009 10:04:36.206508 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" exitCode=0 Oct 09 10:04:36 crc kubenswrapper[4710]: I1009 10:04:36.206571 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d"} Oct 09 10:04:36 crc kubenswrapper[4710]: I1009 10:04:36.206633 4710 scope.go:117] "RemoveContainer" containerID="87d054944fb6c806eb0843e146a21517502d7028c16689af5a0b689040c6c2a2" Oct 09 10:04:36 crc kubenswrapper[4710]: I1009 10:04:36.207162 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:04:36 crc kubenswrapper[4710]: E1009 10:04:36.207515 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.146358 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gqx5w/must-gather-sx4kd"] Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.151406 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.166873 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gqx5w"/"openshift-service-ca.crt" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.167206 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gqx5w"/"default-dockercfg-kl2lq" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.170936 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gqx5w"/"kube-root-ca.crt" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.179326 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gqx5w/must-gather-sx4kd"] Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.226608 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dh82\" (UniqueName: \"kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.226738 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.328605 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dh82\" (UniqueName: \"kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.328718 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.329168 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.352585 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dh82\" (UniqueName: \"kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82\") pod \"must-gather-sx4kd\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.486600 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:04:44 crc kubenswrapper[4710]: I1009 10:04:44.909479 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gqx5w/must-gather-sx4kd"] Oct 09 10:04:45 crc kubenswrapper[4710]: I1009 10:04:45.270718 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" event={"ID":"3a51086d-64d6-4b48-96e8-2d2015392224","Type":"ContainerStarted","Data":"56ece4f8e59fad98c53891e767e030829e0d73093ff3ffd44c186fb5ed664320"} Oct 09 10:04:49 crc kubenswrapper[4710]: I1009 10:04:49.317189 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" event={"ID":"3a51086d-64d6-4b48-96e8-2d2015392224","Type":"ContainerStarted","Data":"80be40503dcb87a8f2d96132c02b4cf1de1444dec1ec891626553fce7ec939ba"} Oct 09 10:04:49 crc kubenswrapper[4710]: I1009 10:04:49.815445 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:04:49 crc kubenswrapper[4710]: E1009 10:04:49.815825 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:04:50 crc kubenswrapper[4710]: I1009 10:04:50.328106 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" event={"ID":"3a51086d-64d6-4b48-96e8-2d2015392224","Type":"ContainerStarted","Data":"e734ff99b15a31351d67c5a557f7c3f5f61b590037e029b23679f4bc9a27ae4f"} Oct 09 10:04:50 crc kubenswrapper[4710]: I1009 10:04:50.348381 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" podStartSLOduration=2.331825345 podStartE2EDuration="6.348365515s" podCreationTimestamp="2025-10-09 10:04:44 +0000 UTC" firstStartedPulling="2025-10-09 10:04:44.921249351 +0000 UTC m=+3608.411357748" lastFinishedPulling="2025-10-09 10:04:48.937789522 +0000 UTC m=+3612.427897918" observedRunningTime="2025-10-09 10:04:50.341125884 +0000 UTC m=+3613.831234280" watchObservedRunningTime="2025-10-09 10:04:50.348365515 +0000 UTC m=+3613.838473912" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.791583 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-sz2q4"] Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.794618 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.881308 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.881371 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt96h\" (UniqueName: \"kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.984516 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.984588 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt96h\" (UniqueName: \"kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:53 crc kubenswrapper[4710]: I1009 10:04:53.984714 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:54 crc kubenswrapper[4710]: I1009 10:04:54.003965 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt96h\" (UniqueName: \"kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h\") pod \"crc-debug-sz2q4\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:54 crc kubenswrapper[4710]: I1009 10:04:54.113960 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:04:54 crc kubenswrapper[4710]: W1009 10:04:54.136381 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a7a1e1a_2a97_413a_99b0_294d55f83dfb.slice/crio-3e557763792d261645d32b208c50c1600bf6ca13359c508f9891893ed45d0d06 WatchSource:0}: Error finding container 3e557763792d261645d32b208c50c1600bf6ca13359c508f9891893ed45d0d06: Status 404 returned error can't find the container with id 3e557763792d261645d32b208c50c1600bf6ca13359c508f9891893ed45d0d06 Oct 09 10:04:54 crc kubenswrapper[4710]: I1009 10:04:54.372165 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" event={"ID":"1a7a1e1a-2a97-413a-99b0-294d55f83dfb","Type":"ContainerStarted","Data":"3e557763792d261645d32b208c50c1600bf6ca13359c508f9891893ed45d0d06"} Oct 09 10:05:04 crc kubenswrapper[4710]: I1009 10:05:04.458672 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" event={"ID":"1a7a1e1a-2a97-413a-99b0-294d55f83dfb","Type":"ContainerStarted","Data":"1ed176fab1467c9757de18ad57d8a13460a2576db7123184b7fe8459b344f607"} Oct 09 10:05:04 crc kubenswrapper[4710]: I1009 10:05:04.472807 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" podStartSLOduration=2.116346871 podStartE2EDuration="11.47279331s" podCreationTimestamp="2025-10-09 10:04:53 +0000 UTC" firstStartedPulling="2025-10-09 10:04:54.138345447 +0000 UTC m=+3617.628453844" lastFinishedPulling="2025-10-09 10:05:03.494791887 +0000 UTC m=+3626.984900283" observedRunningTime="2025-10-09 10:05:04.468028265 +0000 UTC m=+3627.958136662" watchObservedRunningTime="2025-10-09 10:05:04.47279331 +0000 UTC m=+3627.962901707" Oct 09 10:05:04 crc kubenswrapper[4710]: I1009 10:05:04.818450 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:05:04 crc kubenswrapper[4710]: E1009 10:05:04.818677 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:05:16 crc kubenswrapper[4710]: I1009 10:05:16.820112 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:05:16 crc kubenswrapper[4710]: E1009 10:05:16.820780 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:05:29 crc kubenswrapper[4710]: I1009 10:05:29.815383 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:05:29 crc kubenswrapper[4710]: E1009 10:05:29.816180 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:05:35 crc kubenswrapper[4710]: I1009 10:05:35.696181 4710 generic.go:334] "Generic (PLEG): container finished" podID="1a7a1e1a-2a97-413a-99b0-294d55f83dfb" containerID="1ed176fab1467c9757de18ad57d8a13460a2576db7123184b7fe8459b344f607" exitCode=0 Oct 09 10:05:35 crc kubenswrapper[4710]: I1009 10:05:35.696321 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" event={"ID":"1a7a1e1a-2a97-413a-99b0-294d55f83dfb","Type":"ContainerDied","Data":"1ed176fab1467c9757de18ad57d8a13460a2576db7123184b7fe8459b344f607"} Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.787194 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.829734 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt96h\" (UniqueName: \"kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h\") pod \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.830031 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host\") pod \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\" (UID: \"1a7a1e1a-2a97-413a-99b0-294d55f83dfb\") " Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.830752 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host" (OuterVolumeSpecName: "host") pod "1a7a1e1a-2a97-413a-99b0-294d55f83dfb" (UID: "1a7a1e1a-2a97-413a-99b0-294d55f83dfb"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.841699 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-sz2q4"] Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.841732 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-sz2q4"] Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.862609 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h" (OuterVolumeSpecName: "kube-api-access-gt96h") pod "1a7a1e1a-2a97-413a-99b0-294d55f83dfb" (UID: "1a7a1e1a-2a97-413a-99b0-294d55f83dfb"). InnerVolumeSpecName "kube-api-access-gt96h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.932530 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt96h\" (UniqueName: \"kubernetes.io/projected/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-kube-api-access-gt96h\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:36 crc kubenswrapper[4710]: I1009 10:05:36.932559 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1a7a1e1a-2a97-413a-99b0-294d55f83dfb-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.712676 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e557763792d261645d32b208c50c1600bf6ca13359c508f9891893ed45d0d06" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.712755 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-sz2q4" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.991313 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-bqp7x"] Oct 09 10:05:37 crc kubenswrapper[4710]: E1009 10:05:37.992445 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a7a1e1a-2a97-413a-99b0-294d55f83dfb" containerName="container-00" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.992554 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a7a1e1a-2a97-413a-99b0-294d55f83dfb" containerName="container-00" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.992876 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a7a1e1a-2a97-413a-99b0-294d55f83dfb" containerName="container-00" Oct 09 10:05:37 crc kubenswrapper[4710]: I1009 10:05:37.993694 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.055974 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.056069 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.159730 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.160162 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.160276 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.182783 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz\") pod \"crc-debug-bqp7x\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.310214 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:38 crc kubenswrapper[4710]: W1009 10:05:38.332576 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5014ef98_5c5f_4692_914c_fd6769aa4c77.slice/crio-eb8dc75ebe1fa2ba62e701d35df84110173d92e4e0bcd2bb832aac59fd7faad2 WatchSource:0}: Error finding container eb8dc75ebe1fa2ba62e701d35df84110173d92e4e0bcd2bb832aac59fd7faad2: Status 404 returned error can't find the container with id eb8dc75ebe1fa2ba62e701d35df84110173d92e4e0bcd2bb832aac59fd7faad2 Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.719847 4710 generic.go:334] "Generic (PLEG): container finished" podID="5014ef98-5c5f-4692-914c-fd6769aa4c77" containerID="a78ebf55ae7807204183385a1c25d8cdb1fae2a7e92c6cf88ed0f39329166648" exitCode=0 Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.719938 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" event={"ID":"5014ef98-5c5f-4692-914c-fd6769aa4c77","Type":"ContainerDied","Data":"a78ebf55ae7807204183385a1c25d8cdb1fae2a7e92c6cf88ed0f39329166648"} Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.720207 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" event={"ID":"5014ef98-5c5f-4692-914c-fd6769aa4c77","Type":"ContainerStarted","Data":"eb8dc75ebe1fa2ba62e701d35df84110173d92e4e0bcd2bb832aac59fd7faad2"} Oct 09 10:05:38 crc kubenswrapper[4710]: I1009 10:05:38.822640 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a7a1e1a-2a97-413a-99b0-294d55f83dfb" path="/var/lib/kubelet/pods/1a7a1e1a-2a97-413a-99b0-294d55f83dfb/volumes" Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.081493 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-bqp7x"] Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.088705 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-bqp7x"] Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.839457 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.900982 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz\") pod \"5014ef98-5c5f-4692-914c-fd6769aa4c77\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.901197 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host\") pod \"5014ef98-5c5f-4692-914c-fd6769aa4c77\" (UID: \"5014ef98-5c5f-4692-914c-fd6769aa4c77\") " Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.901323 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host" (OuterVolumeSpecName: "host") pod "5014ef98-5c5f-4692-914c-fd6769aa4c77" (UID: "5014ef98-5c5f-4692-914c-fd6769aa4c77"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.901873 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5014ef98-5c5f-4692-914c-fd6769aa4c77-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:39 crc kubenswrapper[4710]: I1009 10:05:39.913579 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz" (OuterVolumeSpecName: "kube-api-access-pvfxz") pod "5014ef98-5c5f-4692-914c-fd6769aa4c77" (UID: "5014ef98-5c5f-4692-914c-fd6769aa4c77"). InnerVolumeSpecName "kube-api-access-pvfxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.003710 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvfxz\" (UniqueName: \"kubernetes.io/projected/5014ef98-5c5f-4692-914c-fd6769aa4c77-kube-api-access-pvfxz\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.262823 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-s6d79"] Oct 09 10:05:40 crc kubenswrapper[4710]: E1009 10:05:40.263907 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5014ef98-5c5f-4692-914c-fd6769aa4c77" containerName="container-00" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.263930 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5014ef98-5c5f-4692-914c-fd6769aa4c77" containerName="container-00" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.264179 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5014ef98-5c5f-4692-914c-fd6769aa4c77" containerName="container-00" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.265399 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.320010 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.320129 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9rgw\" (UniqueName: \"kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.420936 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.420997 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9rgw\" (UniqueName: \"kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.421422 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.437408 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9rgw\" (UniqueName: \"kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw\") pod \"crc-debug-s6d79\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.584743 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:40 crc kubenswrapper[4710]: W1009 10:05:40.613729 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cdc3c34_1b98_4333_b92b_3925df2a36e2.slice/crio-5a965294bee71c862122b08e204a7c8e7324952c47d647138570129f2c1051cf WatchSource:0}: Error finding container 5a965294bee71c862122b08e204a7c8e7324952c47d647138570129f2c1051cf: Status 404 returned error can't find the container with id 5a965294bee71c862122b08e204a7c8e7324952c47d647138570129f2c1051cf Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.750047 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" event={"ID":"5cdc3c34-1b98-4333-b92b-3925df2a36e2","Type":"ContainerStarted","Data":"5a965294bee71c862122b08e204a7c8e7324952c47d647138570129f2c1051cf"} Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.751685 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb8dc75ebe1fa2ba62e701d35df84110173d92e4e0bcd2bb832aac59fd7faad2" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.751743 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-bqp7x" Oct 09 10:05:40 crc kubenswrapper[4710]: I1009 10:05:40.825054 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5014ef98-5c5f-4692-914c-fd6769aa4c77" path="/var/lib/kubelet/pods/5014ef98-5c5f-4692-914c-fd6769aa4c77/volumes" Oct 09 10:05:41 crc kubenswrapper[4710]: I1009 10:05:41.760840 4710 generic.go:334] "Generic (PLEG): container finished" podID="5cdc3c34-1b98-4333-b92b-3925df2a36e2" containerID="07f93f70865489c8d6727609599c66abc71e138a490ecb1bca83f2ed28e086b8" exitCode=0 Oct 09 10:05:41 crc kubenswrapper[4710]: I1009 10:05:41.760930 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" event={"ID":"5cdc3c34-1b98-4333-b92b-3925df2a36e2","Type":"ContainerDied","Data":"07f93f70865489c8d6727609599c66abc71e138a490ecb1bca83f2ed28e086b8"} Oct 09 10:05:41 crc kubenswrapper[4710]: I1009 10:05:41.792406 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-s6d79"] Oct 09 10:05:41 crc kubenswrapper[4710]: I1009 10:05:41.800905 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gqx5w/crc-debug-s6d79"] Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.849931 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.976254 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host\") pod \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.976355 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host" (OuterVolumeSpecName: "host") pod "5cdc3c34-1b98-4333-b92b-3925df2a36e2" (UID: "5cdc3c34-1b98-4333-b92b-3925df2a36e2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.976903 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9rgw\" (UniqueName: \"kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw\") pod \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\" (UID: \"5cdc3c34-1b98-4333-b92b-3925df2a36e2\") " Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.977477 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5cdc3c34-1b98-4333-b92b-3925df2a36e2-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:42 crc kubenswrapper[4710]: I1009 10:05:42.981672 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw" (OuterVolumeSpecName: "kube-api-access-g9rgw") pod "5cdc3c34-1b98-4333-b92b-3925df2a36e2" (UID: "5cdc3c34-1b98-4333-b92b-3925df2a36e2"). InnerVolumeSpecName "kube-api-access-g9rgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:05:43 crc kubenswrapper[4710]: I1009 10:05:43.080289 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9rgw\" (UniqueName: \"kubernetes.io/projected/5cdc3c34-1b98-4333-b92b-3925df2a36e2-kube-api-access-g9rgw\") on node \"crc\" DevicePath \"\"" Oct 09 10:05:43 crc kubenswrapper[4710]: I1009 10:05:43.778172 4710 scope.go:117] "RemoveContainer" containerID="07f93f70865489c8d6727609599c66abc71e138a490ecb1bca83f2ed28e086b8" Oct 09 10:05:43 crc kubenswrapper[4710]: I1009 10:05:43.778205 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/crc-debug-s6d79" Oct 09 10:05:43 crc kubenswrapper[4710]: I1009 10:05:43.816827 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:05:43 crc kubenswrapper[4710]: E1009 10:05:43.817087 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:05:44 crc kubenswrapper[4710]: I1009 10:05:44.823582 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cdc3c34-1b98-4333-b92b-3925df2a36e2" path="/var/lib/kubelet/pods/5cdc3c34-1b98-4333-b92b-3925df2a36e2/volumes" Oct 09 10:05:55 crc kubenswrapper[4710]: I1009 10:05:55.815571 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:05:55 crc kubenswrapper[4710]: E1009 10:05:55.816549 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:06:04 crc kubenswrapper[4710]: I1009 10:06:04.691532 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-api-7c69bd8f44-7wmj7_d11eb962-d716-4dcf-9ec6-f82e6969640f/barbican-api/0.log" Oct 09 10:06:04 crc kubenswrapper[4710]: I1009 10:06:04.755025 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7c69bd8f44-7wmj7_d11eb962-d716-4dcf-9ec6-f82e6969640f/barbican-api-log/0.log" Oct 09 10:06:04 crc kubenswrapper[4710]: I1009 10:06:04.921329 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b5698f678-pqrnp_5a03d872-b139-414d-a62f-953c23fb01a6/barbican-keystone-listener/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.062482 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b5698f678-pqrnp_5a03d872-b139-414d-a62f-953c23fb01a6/barbican-keystone-listener-log/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.233211 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-768bd74b7c-lmgpx_c369359e-6e4c-478b-8ef5-0ebd384acbd8/barbican-worker/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.290192 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-768bd74b7c-lmgpx_c369359e-6e4c-478b-8ef5-0ebd384acbd8/barbican-worker-log/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.471804 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc_1d4d6d90-9993-4a75-8ea7-e6d488a370b0/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.678694 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/ceilometer-central-agent/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.815454 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/ceilometer-notification-agent/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.836754 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/proxy-httpd/0.log" Oct 09 10:06:05 crc kubenswrapper[4710]: I1009 10:06:05.952498 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/sg-core/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.116608 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2_18a1b835-afd2-4ceb-ad50-156b24a80601/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.260892 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk_f25f8196-f0c2-4299-8488-0538f69a70a0/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.614589 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1/cinder-api/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.637226 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1/cinder-api-log/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.790538 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-backup-0_773237c7-043e-4e8c-a646-6a24ab6cf3d5/probe/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.873043 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_773237c7-043e-4e8c-a646-6a24ab6cf3d5/cinder-backup/0.log" Oct 09 10:06:06 crc kubenswrapper[4710]: I1009 10:06:06.999111 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9883d700-efac-4450-81db-b1faf06dc645/cinder-scheduler/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.079683 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9883d700-efac-4450-81db-b1faf06dc645/probe/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.182573 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9c352150-914d-40e6-8eb2-ecbf97b33bbc/probe/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.219546 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9c352150-914d-40e6-8eb2-ecbf97b33bbc/cinder-volume/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.343861 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p_fadd281b-1db6-4170-8ddd-12e4b65a8e5a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.422777 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb_5a884cb4-f253-4c96-9e29-5e60aff6f144/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.519279 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/init/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.637876 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/init/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.677326 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_353d2111-74ef-4b2e-b17d-5e0672f1a33d/glance-httpd/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.840202 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_353d2111-74ef-4b2e-b17d-5e0672f1a33d/glance-log/0.log" Oct 09 10:06:07 crc kubenswrapper[4710]: I1009 10:06:07.940100 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_48c52c4c-a94f-4044-bbbe-9c8e935f1a9c/glance-httpd/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.025751 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_48c52c4c-a94f-4044-bbbe-9c8e935f1a9c/glance-log/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.308388 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-67d94d7dc8-fvmp7_13eb4841-8d3a-4ef6-a2da-656bab482ab4/horizon/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.377239 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-67d94d7dc8-fvmp7_13eb4841-8d3a-4ef6-a2da-656bab482ab4/horizon-log/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 
10:06:08.512661 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh_77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.621933 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-wtmjh_95b718d5-c979-4a2f-82a5-e0915b769b7a/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.886778 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7746794c77-ljwdn_aa709644-7781-443c-b0e3-d5936fff1dde/keystone-api/0.log" Oct 09 10:06:08 crc kubenswrapper[4710]: I1009 10:06:08.918716 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29333401-56psm_208c883f-3974-4994-802b-6c3bcc416326/keystone-cron/0.log" Oct 09 10:06:09 crc kubenswrapper[4710]: I1009 10:06:09.560901 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5f2955ba-a1b4-4cad-8c8b-35d74d914474/kube-state-metrics/0.log" Oct 09 10:06:09 crc kubenswrapper[4710]: I1009 10:06:09.703410 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq_3197a42e-d565-4f24-9115-990a46dfc659/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:09 crc kubenswrapper[4710]: I1009 10:06:09.934717 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_b107b875-5e01-45ce-8702-dcc78cd19193/manila-api-log/0.log" Oct 09 10:06:09 crc kubenswrapper[4710]: I1009 10:06:09.985715 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_b107b875-5e01-45ce-8702-dcc78cd19193/manila-api/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.024952 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/dnsmasq-dns/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.194134 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f91da5bb-5573-4165-8592-6dc828d64596/manila-scheduler/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.218894 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f91da5bb-5573-4165-8592-6dc828d64596/probe/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.305524 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_3efa48f2-2c46-4963-aed1-aa57cb9ada01/manila-share/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.762662 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_3efa48f2-2c46-4963-aed1-aa57cb9ada01/probe/0.log" Oct 09 10:06:10 crc kubenswrapper[4710]: I1009 10:06:10.814568 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:06:10 crc kubenswrapper[4710]: E1009 10:06:10.815385 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.058791 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8dfcdb5-zd6wv_ff8c79d6-681e-4c93-b80e-15c8ff06d6af/neutron-httpd/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.071199 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8dfcdb5-zd6wv_ff8c79d6-681e-4c93-b80e-15c8ff06d6af/neutron-api/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.281486 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl_7688fa72-f35a-4dd1-a1de-1eda8bf5ff77/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.635974 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_61eb69de-ee17-4084-95dc-0192a6d4a0d4/nova-api-log/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.694889 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_79cbbde5-3252-4efd-a000-95ec002a56bb/nova-cell0-conductor-conductor/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.826247 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_61eb69de-ee17-4084-95dc-0192a6d4a0d4/nova-api-api/0.log" Oct 09 10:06:11 crc kubenswrapper[4710]: I1009 10:06:11.956953 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d7541f91-8a95-4a0e-9cdd-95252f38710b/nova-cell1-conductor-conductor/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.040750 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4/nova-cell1-novncproxy-novncproxy/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.226804 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v_f9e2c502-e067-49c7-b805-adc3d054f0cf/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.401917 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0a140770-37ca-4b77-8eed-bc3ecfed72db/nova-metadata-log/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.685185 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_e717823b-a1aa-46c0-b1a6-be9ada2d596f/nova-scheduler-scheduler/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.685805 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/mysql-bootstrap/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.969866 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/galera/0.log" Oct 09 10:06:12 crc kubenswrapper[4710]: I1009 10:06:12.987143 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/mysql-bootstrap/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.244660 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/mysql-bootstrap/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.345150 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/mysql-bootstrap/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.425465 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/galera/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.437112 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0a140770-37ca-4b77-8eed-bc3ecfed72db/nova-metadata-metadata/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.608450 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ff4n7_8c4f1833-b6b6-4c51-bd5e-0b4cf749e848/ovn-controller/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.673799 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b7779e6d-a6fb-4d03-8636-0dafb2767cbc/openstackclient/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.943583 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vcl7d_50c1f1ba-8a7d-482b-841f-591355f9dd44/openstack-network-exporter/0.log" Oct 09 10:06:13 crc kubenswrapper[4710]: I1009 10:06:13.956003 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server-init/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.149594 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.231046 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server-init/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.247169 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovs-vswitchd/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.384202 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fkddm_30e0d33b-9f4a-4209-ad66-d5f51af8deea/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.525871 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b6b2be0-00ba-434b-9310-32ee9f286c71/ovn-northd/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.534344 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b6b2be0-00ba-434b-9310-32ee9f286c71/openstack-network-exporter/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.718096 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_958e4ef0-c38c-411d-9893-bac75789df76/ovsdbserver-nb/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.720302 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_958e4ef0-c38c-411d-9893-bac75789df76/openstack-network-exporter/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.803526 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f/openstack-network-exporter/0.log" Oct 09 10:06:14 crc kubenswrapper[4710]: I1009 10:06:14.944752 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f/ovsdbserver-sb/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.071806 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-685c5ffc88-49dzx_a038f2bb-2364-4a8d-918c-a0776dfa8458/placement-api/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.179250 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-685c5ffc88-49dzx_a038f2bb-2364-4a8d-918c-a0776dfa8458/placement-log/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.279916 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/setup-container/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.431683 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/rabbitmq/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.482611 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/setup-container/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.589333 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/setup-container/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.665950 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/setup-container/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.687040 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/rabbitmq/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.809688 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs_cdbaf739-8dd9-457f-97a5-8ddbcff386ea/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:15 crc kubenswrapper[4710]: I1009 10:06:15.968359 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv_0394728a-d605-415c-afcf-5f52e6b3bcac/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:16 crc kubenswrapper[4710]: I1009 10:06:16.061663 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-296ks_3015dc52-bcf5-444b-9200-82a3f79b0fcb/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:16 crc kubenswrapper[4710]: I1009 10:06:16.735570 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_36f8a9d9-4342-4b4f-921b-a0acbe1215db/tempest-tests-tempest-tests-runner/0.log" Oct 09 10:06:16 crc kubenswrapper[4710]: I1009 10:06:16.767118 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-6kwm8_9910b475-f4c5-49db-b431-b7214908cf77/ssh-known-hosts-edpm-deployment/0.log" Oct 09 10:06:16 crc kubenswrapper[4710]: I1009 10:06:16.978203 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b4ec062d-c82a-4a34-801c-4a290d15c32e/test-operator-logs-container/0.log" Oct 09 10:06:16 crc kubenswrapper[4710]: I1009 10:06:16.979173 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc_08cf9289-9c53-4831-9bf9-3e0b70a457d5/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:06:21 crc kubenswrapper[4710]: I1009 10:06:21.815962 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:06:21 crc kubenswrapper[4710]: E1009 10:06:21.821049 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:06:27 crc kubenswrapper[4710]: I1009 10:06:27.706320 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_163b8a88-dc31-4540-a39b-bfecc81ce8aa/memcached/0.log" Oct 09 10:06:33 crc kubenswrapper[4710]: I1009 10:06:33.815694 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:06:33 crc kubenswrapper[4710]: E1009 10:06:33.816822 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:06:42 crc kubenswrapper[4710]: I1009 10:06:42.950800 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.447499 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.498470 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.508612 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.649304 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.663573 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/extract/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.724394 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.825123 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-mvjkw_8437ff8a-3892-464b-963b-d5afaf9599dc/kube-rbac-proxy/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.871516 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-mvjkw_8437ff8a-3892-464b-963b-d5afaf9599dc/manager/0.log" Oct 09 10:06:43 crc kubenswrapper[4710]: I1009 10:06:43.956992 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-7t5cw_26ad6e31-7002-4043-a971-aa507f4118bf/kube-rbac-proxy/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.065654 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-7t5cw_26ad6e31-7002-4043-a971-aa507f4118bf/manager/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.153027 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-qq9g6_5fc82f31-455f-4960-8538-5315e1a3a09a/kube-rbac-proxy/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.162267 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-qq9g6_5fc82f31-455f-4960-8538-5315e1a3a09a/manager/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.284855 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-4szsc_10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6/kube-rbac-proxy/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.400260 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-4szsc_10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6/manager/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.461398 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-qhvrk_1438d002-6055-453b-8a7a-c83888b37429/kube-rbac-proxy/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.498853 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-qhvrk_1438d002-6055-453b-8a7a-c83888b37429/manager/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.577147 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-svtkg_3b555e01-0210-431d-83ab-97ebcc53a68b/kube-rbac-proxy/0.log" Oct 09 10:06:44 crc kubenswrapper[4710]: I1009 10:06:44.650322 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-svtkg_3b555e01-0210-431d-83ab-97ebcc53a68b/manager/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.077070 4710 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-m95bh_84815f80-0c57-4246-abe3-7c54bd77d1c1/kube-rbac-proxy/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.201678 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-m95bh_84815f80-0c57-4246-abe3-7c54bd77d1c1/manager/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.314887 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-dhvdq_21d3fd5d-1f17-45d0-bf73-59fdc7211820/manager/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.374156 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-dhvdq_21d3fd5d-1f17-45d0-bf73-59fdc7211820/kube-rbac-proxy/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.402534 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-jb2h2_f78f287b-b34d-40c0-ad99-caaf90bc2ae7/kube-rbac-proxy/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.612982 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-jb2h2_f78f287b-b34d-40c0-ad99-caaf90bc2ae7/manager/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.716908 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2htjv_fa97dde8-95a7-4c4b-820d-d889545d79d5/kube-rbac-proxy/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.760958 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2htjv_fa97dde8-95a7-4c4b-820d-d889545d79d5/manager/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.922308 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-6lbkw_72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a/kube-rbac-proxy/0.log" Oct 09 10:06:45 crc kubenswrapper[4710]: I1009 10:06:45.994375 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-6lbkw_72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a/manager/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.166407 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-d7sqz_d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf/kube-rbac-proxy/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.211479 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-d7sqz_d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf/manager/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.248867 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-sd5xk_51113172-27cd-47a3-8bc2-b751cb1654f7/kube-rbac-proxy/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.478620 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-sd5xk_51113172-27cd-47a3-8bc2-b751cb1654f7/manager/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 
10:06:46.496689 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-nmmkl_addc94b4-bdbb-4a05-993d-5a7ac2bb3e19/kube-rbac-proxy/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.522950 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-nmmkl_addc94b4-bdbb-4a05-993d-5a7ac2bb3e19/manager/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.657548 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj_cce3973b-b375-4ea1-b907-0f46e330dfae/manager/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.665060 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj_cce3973b-b375-4ea1-b907-0f46e330dfae/kube-rbac-proxy/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.715648 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7d6957655c-6trjl_eda25b03-4fb3-4ace-803c-1d1800196995/kube-rbac-proxy/0.log" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.832103 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:06:46 crc kubenswrapper[4710]: E1009 10:06:46.832491 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:06:46 crc kubenswrapper[4710]: I1009 10:06:46.981971 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-848c57cb5c-lrs49_f23bbd5e-3d87-4396-aad0-9455c284fbf8/kube-rbac-proxy/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.097910 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-848c57cb5c-lrs49_f23bbd5e-3d87-4396-aad0-9455c284fbf8/operator/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.270070 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nlptw_ea70cec4-b9bc-48b8-8871-034a6d5b392a/registry-server/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.303531 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79db49b9fb-vnvl6_4a08f5f1-bab4-425c-b81c-b48f2d4a186b/kube-rbac-proxy/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.418748 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79db49b9fb-vnvl6_4a08f5f1-bab4-425c-b81c-b48f2d4a186b/manager/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.569112 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-lghdj_2e6376a2-edb7-4958-b3b3-3a6773782349/kube-rbac-proxy/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.680375 4710 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-lghdj_2e6376a2-edb7-4958-b3b3-3a6773782349/manager/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.896396 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-292ft_eb73b966-fd38-499c-a018-d28ad9acda92/operator/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.987913 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-rh8n6_9f1f2915-bd9f-496f-a513-e1fd022ee463/kube-rbac-proxy/0.log" Oct 09 10:06:47 crc kubenswrapper[4710]: I1009 10:06:47.991814 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7d6957655c-6trjl_eda25b03-4fb3-4ace-803c-1d1800196995/manager/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.058049 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-rh8n6_9f1f2915-bd9f-496f-a513-e1fd022ee463/manager/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.059215 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-t69gh_2c55fbcc-5995-4a59-b8ae-dc8be7411fa8/kube-rbac-proxy/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.254561 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-t69gh_2c55fbcc-5995-4a59-b8ae-dc8be7411fa8/manager/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.343449 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-fzwm9_b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3/manager/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.363278 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-fzwm9_b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3/kube-rbac-proxy/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.448873 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-759pl_94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e/kube-rbac-proxy/0.log" Oct 09 10:06:48 crc kubenswrapper[4710]: I1009 10:06:48.478144 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-759pl_94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e/manager/0.log" Oct 09 10:06:57 crc kubenswrapper[4710]: I1009 10:06:57.814877 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:06:57 crc kubenswrapper[4710]: E1009 10:06:57.816386 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:07:02 crc kubenswrapper[4710]: I1009 10:07:02.911480 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fkhwq_51378278-0202-4be1-96a8-28f4c81a6aae/control-plane-machine-set-operator/0.log" Oct 09 10:07:03 crc kubenswrapper[4710]: I1009 10:07:03.091573 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k94j_c3193888-6214-44cb-a0bc-0091046b80c2/kube-rbac-proxy/0.log" Oct 09 10:07:03 crc kubenswrapper[4710]: I1009 10:07:03.139120 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k94j_c3193888-6214-44cb-a0bc-0091046b80c2/machine-api-operator/0.log" Oct 09 10:07:11 crc kubenswrapper[4710]: I1009 10:07:11.814630 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:07:11 crc kubenswrapper[4710]: E1009 10:07:11.815378 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:07:15 crc kubenswrapper[4710]: I1009 10:07:15.924678 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7mwtc_4a0738a0-347d-48d6-a47e-52e13d52664d/cert-manager-controller/0.log" Oct 09 10:07:16 crc kubenswrapper[4710]: I1009 10:07:16.028041 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-dx5ll_1631e6bf-cf27-4c97-ab40-8b2170648070/cert-manager-cainjector/0.log" Oct 09 10:07:16 crc kubenswrapper[4710]: I1009 10:07:16.158792 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-hgq5g_878c656c-1bab-4b97-8267-910f8890946a/cert-manager-webhook/0.log" Oct 09 10:07:23 crc kubenswrapper[4710]: I1009 10:07:23.815257 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:07:23 crc kubenswrapper[4710]: E1009 10:07:23.816038 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.193735 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-ptzlb_dd0f6b60-657a-4d34-a175-74f88f730669/nmstate-console-plugin/0.log" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.533346 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-bcx7j_0899a565-e72a-498c-9071-7b05ccb027bd/kube-rbac-proxy/0.log" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.538032 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zcxkk_f174bd18-14b5-495f-8d34-795eca72dc06/nmstate-handler/0.log" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.575365 4710 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-bcx7j_0899a565-e72a-498c-9071-7b05ccb027bd/nmstate-metrics/0.log" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.716447 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-c98j9_35322d9f-0b0d-40a7-b13c-7763f5027a59/nmstate-operator/0.log" Oct 09 10:07:27 crc kubenswrapper[4710]: I1009 10:07:27.778680 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sfv4r_89eda8ab-752d-4dc4-af4a-009431208f96/nmstate-webhook/0.log" Oct 09 10:07:35 crc kubenswrapper[4710]: I1009 10:07:35.815692 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:07:35 crc kubenswrapper[4710]: E1009 10:07:35.816737 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.020755 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fjp44_74b53642-04a4-4331-806b-c9f84d190746/kube-rbac-proxy/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.116068 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fjp44_74b53642-04a4-4331-806b-c9f84d190746/controller/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.208947 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.418748 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.427098 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.479476 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.488858 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.644616 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.699322 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.707165 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.728842 4710 log.go:25] "Finished parsing log 
file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.906570 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.908255 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.926240 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:07:40 crc kubenswrapper[4710]: I1009 10:07:40.959396 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/controller/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.134374 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/frr-metrics/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.142062 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/kube-rbac-proxy/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.196993 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/kube-rbac-proxy-frr/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.401356 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-t2fhd_f03c49fa-bc71-4bac-b0bb-e25876b0cef7/frr-k8s-webhook-server/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.478615 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/reloader/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.660818 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7cb7c7d4ff-szp6n_95157129-8087-4ba4-9b97-980dc6f6d88d/manager/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.835193 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-86bdd9545f-gp42b_89f2b083-5809-4a5b-9c55-75bb2c0807a8/webhook-server/0.log" Oct 09 10:07:41 crc kubenswrapper[4710]: I1009 10:07:41.930268 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wvbb_f1dc3816-2a79-4e41-8337-bc61c3bbafc8/kube-rbac-proxy/0.log" Oct 09 10:07:42 crc kubenswrapper[4710]: I1009 10:07:42.345589 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/frr/0.log" Oct 09 10:07:42 crc kubenswrapper[4710]: I1009 10:07:42.461070 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wvbb_f1dc3816-2a79-4e41-8337-bc61c3bbafc8/speaker/0.log" Oct 09 10:07:50 crc kubenswrapper[4710]: I1009 10:07:50.815388 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:07:50 crc kubenswrapper[4710]: E1009 10:07:50.816097 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.553962 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.697690 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.703226 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.734331 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.957947 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.982784 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/extract/0.log" Oct 09 10:07:52 crc kubenswrapper[4710]: I1009 10:07:52.991585 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:07:53 crc kubenswrapper[4710]: I1009 10:07:53.622973 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:07:53 crc kubenswrapper[4710]: I1009 10:07:53.764081 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:07:53 crc kubenswrapper[4710]: I1009 10:07:53.784688 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:07:53 crc kubenswrapper[4710]: I1009 10:07:53.799298 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:07:53 crc kubenswrapper[4710]: I1009 10:07:53.983388 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.005456 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.232353 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.348883 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/registry-server/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.406460 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.412580 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.459573 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.647567 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:07:54 crc kubenswrapper[4710]: I1009 10:07:54.745789 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.190571 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.191853 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/registry-server/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.313750 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.322204 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.362069 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.532958 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/extract/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.537825 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.557104 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.689411 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mf25c_0f52acde-7961-4866-8e50-2d6839085e4d/marketplace-operator/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.737207 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.894974 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.900581 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:07:55 crc kubenswrapper[4710]: I1009 10:07:55.914166 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.069889 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.107078 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.164382 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.198110 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/registry-server/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.334314 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.339387 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.403938 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.555389 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:07:56 crc kubenswrapper[4710]: I1009 10:07:56.584602 4710 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:07:57 crc kubenswrapper[4710]: I1009 10:07:57.029077 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/registry-server/0.log" Oct 09 10:08:03 crc kubenswrapper[4710]: I1009 10:08:03.814869 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:08:03 crc kubenswrapper[4710]: E1009 10:08:03.816275 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:08:15 crc kubenswrapper[4710]: I1009 10:08:15.816175 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:08:15 crc kubenswrapper[4710]: E1009 10:08:15.816987 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:08:25 crc kubenswrapper[4710]: E1009 10:08:25.615437 4710 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.26.166:57922->192.168.26.166:38571: read tcp 192.168.26.166:57922->192.168.26.166:38571: read: connection reset by peer Oct 09 10:08:29 crc kubenswrapper[4710]: I1009 10:08:29.815141 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:08:29 crc kubenswrapper[4710]: E1009 10:08:29.816026 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:08:43 crc kubenswrapper[4710]: I1009 10:08:43.815038 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:08:43 crc kubenswrapper[4710]: E1009 10:08:43.815630 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:08:54 crc kubenswrapper[4710]: I1009 10:08:54.814545 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:08:54 crc 
kubenswrapper[4710]: E1009 10:08:54.815099 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:09:07 crc kubenswrapper[4710]: I1009 10:09:07.814784 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:09:07 crc kubenswrapper[4710]: E1009 10:09:07.815374 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:09:22 crc kubenswrapper[4710]: I1009 10:09:22.818185 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:09:22 crc kubenswrapper[4710]: E1009 10:09:22.818667 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:09:33 crc kubenswrapper[4710]: I1009 10:09:33.815034 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:09:33 crc kubenswrapper[4710]: E1009 10:09:33.815850 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" Oct 09 10:09:38 crc kubenswrapper[4710]: I1009 10:09:38.946874 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:38 crc kubenswrapper[4710]: E1009 10:09:38.947759 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cdc3c34-1b98-4333-b92b-3925df2a36e2" containerName="container-00" Oct 09 10:09:38 crc kubenswrapper[4710]: I1009 10:09:38.947773 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cdc3c34-1b98-4333-b92b-3925df2a36e2" containerName="container-00" Oct 09 10:09:38 crc kubenswrapper[4710]: I1009 10:09:38.947930 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cdc3c34-1b98-4333-b92b-3925df2a36e2" containerName="container-00" Oct 09 10:09:38 crc kubenswrapper[4710]: I1009 10:09:38.949132 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.005167 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.074495 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx5hz\" (UniqueName: \"kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.074559 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.074682 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.176354 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.176486 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.176556 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx5hz\" (UniqueName: \"kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.176827 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.176940 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.197260 4710 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mx5hz\" (UniqueName: \"kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz\") pod \"redhat-marketplace-mmcbx\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.265945 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:39 crc kubenswrapper[4710]: I1009 10:09:39.853340 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:40 crc kubenswrapper[4710]: I1009 10:09:40.633120 4710 generic.go:334] "Generic (PLEG): container finished" podID="94b93997-c92a-41fa-8f59-e5422af55007" containerID="2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820" exitCode=0 Oct 09 10:09:40 crc kubenswrapper[4710]: I1009 10:09:40.633211 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerDied","Data":"2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820"} Oct 09 10:09:40 crc kubenswrapper[4710]: I1009 10:09:40.633405 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerStarted","Data":"07c4111a5c8d8d20460b3d243eca409f1f060452b21e7605f61f7563a581f466"} Oct 09 10:09:40 crc kubenswrapper[4710]: I1009 10:09:40.635344 4710 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 09 10:09:41 crc kubenswrapper[4710]: I1009 10:09:41.642227 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerStarted","Data":"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45"} Oct 09 10:09:41 crc kubenswrapper[4710]: I1009 10:09:41.645200 4710 generic.go:334] "Generic (PLEG): container finished" podID="3a51086d-64d6-4b48-96e8-2d2015392224" containerID="80be40503dcb87a8f2d96132c02b4cf1de1444dec1ec891626553fce7ec939ba" exitCode=0 Oct 09 10:09:41 crc kubenswrapper[4710]: I1009 10:09:41.645250 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" event={"ID":"3a51086d-64d6-4b48-96e8-2d2015392224","Type":"ContainerDied","Data":"80be40503dcb87a8f2d96132c02b4cf1de1444dec1ec891626553fce7ec939ba"} Oct 09 10:09:41 crc kubenswrapper[4710]: I1009 10:09:41.645780 4710 scope.go:117] "RemoveContainer" containerID="80be40503dcb87a8f2d96132c02b4cf1de1444dec1ec891626553fce7ec939ba" Oct 09 10:09:41 crc kubenswrapper[4710]: I1009 10:09:41.981788 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gqx5w_must-gather-sx4kd_3a51086d-64d6-4b48-96e8-2d2015392224/gather/0.log" Oct 09 10:09:42 crc kubenswrapper[4710]: I1009 10:09:42.656379 4710 generic.go:334] "Generic (PLEG): container finished" podID="94b93997-c92a-41fa-8f59-e5422af55007" containerID="7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45" exitCode=0 Oct 09 10:09:42 crc kubenswrapper[4710]: I1009 10:09:42.656606 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" 
event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerDied","Data":"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45"} Oct 09 10:09:43 crc kubenswrapper[4710]: I1009 10:09:43.667353 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerStarted","Data":"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779"} Oct 09 10:09:43 crc kubenswrapper[4710]: I1009 10:09:43.689724 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mmcbx" podStartSLOduration=3.228885623 podStartE2EDuration="5.689709416s" podCreationTimestamp="2025-10-09 10:09:38 +0000 UTC" firstStartedPulling="2025-10-09 10:09:40.635115597 +0000 UTC m=+3904.125223994" lastFinishedPulling="2025-10-09 10:09:43.09593939 +0000 UTC m=+3906.586047787" observedRunningTime="2025-10-09 10:09:43.683736352 +0000 UTC m=+3907.173844749" watchObservedRunningTime="2025-10-09 10:09:43.689709416 +0000 UTC m=+3907.179817813" Oct 09 10:09:45 crc kubenswrapper[4710]: I1009 10:09:45.816069 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:09:46 crc kubenswrapper[4710]: I1009 10:09:46.690846 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401"} Oct 09 10:09:49 crc kubenswrapper[4710]: I1009 10:09:49.266968 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:49 crc kubenswrapper[4710]: I1009 10:09:49.267391 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:49 crc kubenswrapper[4710]: I1009 10:09:49.306416 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:49 crc kubenswrapper[4710]: I1009 10:09:49.761894 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:49 crc kubenswrapper[4710]: I1009 10:09:49.806251 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.277849 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gqx5w/must-gather-sx4kd"] Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.279229 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="copy" containerID="cri-o://e734ff99b15a31351d67c5a557f7c3f5f61b590037e029b23679f4bc9a27ae4f" gracePeriod=2 Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.285945 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gqx5w/must-gather-sx4kd"] Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.733000 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gqx5w_must-gather-sx4kd_3a51086d-64d6-4b48-96e8-2d2015392224/copy/0.log" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.733653 4710 
generic.go:334] "Generic (PLEG): container finished" podID="3a51086d-64d6-4b48-96e8-2d2015392224" containerID="e734ff99b15a31351d67c5a557f7c3f5f61b590037e029b23679f4bc9a27ae4f" exitCode=143 Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.734418 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56ece4f8e59fad98c53891e767e030829e0d73093ff3ffd44c186fb5ed664320" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.743744 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gqx5w_must-gather-sx4kd_3a51086d-64d6-4b48-96e8-2d2015392224/copy/0.log" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.744403 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.834837 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output\") pod \"3a51086d-64d6-4b48-96e8-2d2015392224\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.834994 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dh82\" (UniqueName: \"kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82\") pod \"3a51086d-64d6-4b48-96e8-2d2015392224\" (UID: \"3a51086d-64d6-4b48-96e8-2d2015392224\") " Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.839646 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82" (OuterVolumeSpecName: "kube-api-access-5dh82") pod "3a51086d-64d6-4b48-96e8-2d2015392224" (UID: "3a51086d-64d6-4b48-96e8-2d2015392224"). InnerVolumeSpecName "kube-api-access-5dh82". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.938405 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dh82\" (UniqueName: \"kubernetes.io/projected/3a51086d-64d6-4b48-96e8-2d2015392224-kube-api-access-5dh82\") on node \"crc\" DevicePath \"\"" Oct 09 10:09:50 crc kubenswrapper[4710]: I1009 10:09:50.977531 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3a51086d-64d6-4b48-96e8-2d2015392224" (UID: "3a51086d-64d6-4b48-96e8-2d2015392224"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:09:51 crc kubenswrapper[4710]: I1009 10:09:51.040413 4710 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3a51086d-64d6-4b48-96e8-2d2015392224-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 09 10:09:51 crc kubenswrapper[4710]: I1009 10:09:51.740037 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gqx5w/must-gather-sx4kd" Oct 09 10:09:51 crc kubenswrapper[4710]: I1009 10:09:51.740180 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mmcbx" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="registry-server" containerID="cri-o://bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779" gracePeriod=2 Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.311251 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.364154 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content\") pod \"94b93997-c92a-41fa-8f59-e5422af55007\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.364366 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities\") pod \"94b93997-c92a-41fa-8f59-e5422af55007\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.364413 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx5hz\" (UniqueName: \"kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz\") pod \"94b93997-c92a-41fa-8f59-e5422af55007\" (UID: \"94b93997-c92a-41fa-8f59-e5422af55007\") " Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.365307 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities" (OuterVolumeSpecName: "utilities") pod "94b93997-c92a-41fa-8f59-e5422af55007" (UID: "94b93997-c92a-41fa-8f59-e5422af55007"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.366127 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.369180 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz" (OuterVolumeSpecName: "kube-api-access-mx5hz") pod "94b93997-c92a-41fa-8f59-e5422af55007" (UID: "94b93997-c92a-41fa-8f59-e5422af55007"). InnerVolumeSpecName "kube-api-access-mx5hz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.374935 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "94b93997-c92a-41fa-8f59-e5422af55007" (UID: "94b93997-c92a-41fa-8f59-e5422af55007"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.468438 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx5hz\" (UniqueName: \"kubernetes.io/projected/94b93997-c92a-41fa-8f59-e5422af55007-kube-api-access-mx5hz\") on node \"crc\" DevicePath \"\"" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.468470 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/94b93997-c92a-41fa-8f59-e5422af55007-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.750450 4710 generic.go:334] "Generic (PLEG): container finished" podID="94b93997-c92a-41fa-8f59-e5422af55007" containerID="bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779" exitCode=0 Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.750489 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerDied","Data":"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779"} Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.750513 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mmcbx" event={"ID":"94b93997-c92a-41fa-8f59-e5422af55007","Type":"ContainerDied","Data":"07c4111a5c8d8d20460b3d243eca409f1f060452b21e7605f61f7563a581f466"} Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.750528 4710 scope.go:117] "RemoveContainer" containerID="bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.750646 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mmcbx" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.778269 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.783947 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mmcbx"] Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.789065 4710 scope.go:117] "RemoveContainer" containerID="7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.824517 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" path="/var/lib/kubelet/pods/3a51086d-64d6-4b48-96e8-2d2015392224/volumes" Oct 09 10:09:52 crc kubenswrapper[4710]: I1009 10:09:52.825132 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94b93997-c92a-41fa-8f59-e5422af55007" path="/var/lib/kubelet/pods/94b93997-c92a-41fa-8f59-e5422af55007/volumes" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.190661 4710 scope.go:117] "RemoveContainer" containerID="2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.205789 4710 scope.go:117] "RemoveContainer" containerID="bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779" Oct 09 10:09:53 crc kubenswrapper[4710]: E1009 10:09:53.208540 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779\": container with ID starting with bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779 not found: ID does not exist" containerID="bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.208570 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779"} err="failed to get container status \"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779\": rpc error: code = NotFound desc = could not find container \"bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779\": container with ID starting with bc7faaea00e51f5d01cff80bc3e988fbd659713be767a1ae6de648b6b0d99779 not found: ID does not exist" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.208592 4710 scope.go:117] "RemoveContainer" containerID="7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45" Oct 09 10:09:53 crc kubenswrapper[4710]: E1009 10:09:53.208837 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45\": container with ID starting with 7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45 not found: ID does not exist" containerID="7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.208877 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45"} err="failed to get container status \"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45\": rpc error: code = NotFound desc = could not find 
container \"7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45\": container with ID starting with 7ab2033b18711951870c69eef5eb528b320207c0f91a966b31af06e80bfb3b45 not found: ID does not exist" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.208900 4710 scope.go:117] "RemoveContainer" containerID="2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820" Oct 09 10:09:53 crc kubenswrapper[4710]: E1009 10:09:53.209173 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820\": container with ID starting with 2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820 not found: ID does not exist" containerID="2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820" Oct 09 10:09:53 crc kubenswrapper[4710]: I1009 10:09:53.209210 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820"} err="failed to get container status \"2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820\": rpc error: code = NotFound desc = could not find container \"2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820\": container with ID starting with 2075345cc3c6d6501690f29b1128d8fa9966046e6573c7b2c8d40988e2483820 not found: ID does not exist" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228044 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bvn9v/must-gather-7xvz7"] Oct 09 10:10:33 crc kubenswrapper[4710]: E1009 10:10:33.228704 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="extract-content" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228717 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="extract-content" Oct 09 10:10:33 crc kubenswrapper[4710]: E1009 10:10:33.228735 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="gather" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228740 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="gather" Oct 09 10:10:33 crc kubenswrapper[4710]: E1009 10:10:33.228753 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="registry-server" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228758 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="registry-server" Oct 09 10:10:33 crc kubenswrapper[4710]: E1009 10:10:33.228770 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="copy" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228775 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="copy" Oct 09 10:10:33 crc kubenswrapper[4710]: E1009 10:10:33.228794 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="extract-utilities" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228799 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="94b93997-c92a-41fa-8f59-e5422af55007" 
containerName="extract-utilities" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228943 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="copy" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228952 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a51086d-64d6-4b48-96e8-2d2015392224" containerName="gather" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.228959 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="94b93997-c92a-41fa-8f59-e5422af55007" containerName="registry-server" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.229802 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.231511 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-bvn9v"/"default-dockercfg-78t5k" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.232102 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bvn9v"/"kube-root-ca.crt" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.232349 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bvn9v"/"openshift-service-ca.crt" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.272782 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.272871 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g6pw\" (UniqueName: \"kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.275523 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bvn9v/must-gather-7xvz7"] Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.374340 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.374447 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g6pw\" (UniqueName: \"kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.374760 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 
09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.390998 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g6pw\" (UniqueName: \"kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw\") pod \"must-gather-7xvz7\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") " pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:33 crc kubenswrapper[4710]: I1009 10:10:33.545285 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" Oct 09 10:10:34 crc kubenswrapper[4710]: I1009 10:10:34.098204 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bvn9v/must-gather-7xvz7"] Oct 09 10:10:35 crc kubenswrapper[4710]: I1009 10:10:35.018186 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" event={"ID":"3c64da38-182d-4f50-bbc4-d97effae13c4","Type":"ContainerStarted","Data":"86b4bfca505259000838f9cb023f1426401b0bb7b8e496a4160b4ac146c76208"} Oct 09 10:10:35 crc kubenswrapper[4710]: I1009 10:10:35.018623 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" event={"ID":"3c64da38-182d-4f50-bbc4-d97effae13c4","Type":"ContainerStarted","Data":"fa46ca85dc15060df8a048fa3a55f9ae3a5b5baf9dc6ba23c3502625111e290b"} Oct 09 10:10:35 crc kubenswrapper[4710]: I1009 10:10:35.018636 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" event={"ID":"3c64da38-182d-4f50-bbc4-d97effae13c4","Type":"ContainerStarted","Data":"309aae42c46d1f41e21d8ceff748b30e0c41fc2ba49303271b85ca437fe40270"} Oct 09 10:10:35 crc kubenswrapper[4710]: I1009 10:10:35.036322 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" podStartSLOduration=2.036291527 podStartE2EDuration="2.036291527s" podCreationTimestamp="2025-10-09 10:10:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:10:35.033947047 +0000 UTC m=+3958.524055444" watchObservedRunningTime="2025-10-09 10:10:35.036291527 +0000 UTC m=+3958.526399924" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.836853 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-tnnzr"] Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.838245 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.855113 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.855168 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swgg2\" (UniqueName: \"kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.957506 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.957552 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swgg2\" (UniqueName: \"kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.957689 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:37 crc kubenswrapper[4710]: I1009 10:10:37.976020 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swgg2\" (UniqueName: \"kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2\") pod \"crc-debug-tnnzr\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:38 crc kubenswrapper[4710]: I1009 10:10:38.153189 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:10:38 crc kubenswrapper[4710]: W1009 10:10:38.174651 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48a47dcf_7415_4c37_86ac_ea013f15a06e.slice/crio-36c70334fe06c591067e3b904b5f6eb79ddf701b71247cc72d4b1daf411a13ea WatchSource:0}: Error finding container 36c70334fe06c591067e3b904b5f6eb79ddf701b71247cc72d4b1daf411a13ea: Status 404 returned error can't find the container with id 36c70334fe06c591067e3b904b5f6eb79ddf701b71247cc72d4b1daf411a13ea Oct 09 10:10:39 crc kubenswrapper[4710]: I1009 10:10:39.044554 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" event={"ID":"48a47dcf-7415-4c37-86ac-ea013f15a06e","Type":"ContainerStarted","Data":"20b08aa19bf9f6143cebd648de97adcf3f0365408503554a7d482014b0852072"} Oct 09 10:10:39 crc kubenswrapper[4710]: I1009 10:10:39.044753 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" event={"ID":"48a47dcf-7415-4c37-86ac-ea013f15a06e","Type":"ContainerStarted","Data":"36c70334fe06c591067e3b904b5f6eb79ddf701b71247cc72d4b1daf411a13ea"} Oct 09 10:10:39 crc kubenswrapper[4710]: I1009 10:10:39.056876 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" podStartSLOduration=2.056859471 podStartE2EDuration="2.056859471s" podCreationTimestamp="2025-10-09 10:10:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:10:39.053752171 +0000 UTC m=+3962.543860568" watchObservedRunningTime="2025-10-09 10:10:39.056859471 +0000 UTC m=+3962.546967867" Oct 09 10:10:52 crc kubenswrapper[4710]: I1009 10:10:52.887796 4710 scope.go:117] "RemoveContainer" containerID="80be40503dcb87a8f2d96132c02b4cf1de1444dec1ec891626553fce7ec939ba" Oct 09 10:10:52 crc kubenswrapper[4710]: I1009 10:10:52.947448 4710 scope.go:117] "RemoveContainer" containerID="e734ff99b15a31351d67c5a557f7c3f5f61b590037e029b23679f4bc9a27ae4f" Oct 09 10:11:06 crc kubenswrapper[4710]: I1009 10:11:06.224335 4710 generic.go:334] "Generic (PLEG): container finished" podID="48a47dcf-7415-4c37-86ac-ea013f15a06e" containerID="20b08aa19bf9f6143cebd648de97adcf3f0365408503554a7d482014b0852072" exitCode=0 Oct 09 10:11:06 crc kubenswrapper[4710]: I1009 10:11:06.224391 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" event={"ID":"48a47dcf-7415-4c37-86ac-ea013f15a06e","Type":"ContainerDied","Data":"20b08aa19bf9f6143cebd648de97adcf3f0365408503554a7d482014b0852072"} Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.465530 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.498205 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-tnnzr"] Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.507491 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-tnnzr"] Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.564241 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swgg2\" (UniqueName: \"kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2\") pod \"48a47dcf-7415-4c37-86ac-ea013f15a06e\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.564610 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host\") pod \"48a47dcf-7415-4c37-86ac-ea013f15a06e\" (UID: \"48a47dcf-7415-4c37-86ac-ea013f15a06e\") " Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.564701 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host" (OuterVolumeSpecName: "host") pod "48a47dcf-7415-4c37-86ac-ea013f15a06e" (UID: "48a47dcf-7415-4c37-86ac-ea013f15a06e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.565115 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/48a47dcf-7415-4c37-86ac-ea013f15a06e-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.570570 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2" (OuterVolumeSpecName: "kube-api-access-swgg2") pod "48a47dcf-7415-4c37-86ac-ea013f15a06e" (UID: "48a47dcf-7415-4c37-86ac-ea013f15a06e"). InnerVolumeSpecName "kube-api-access-swgg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:11:07 crc kubenswrapper[4710]: I1009 10:11:07.666759 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swgg2\" (UniqueName: \"kubernetes.io/projected/48a47dcf-7415-4c37-86ac-ea013f15a06e-kube-api-access-swgg2\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.239849 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36c70334fe06c591067e3b904b5f6eb79ddf701b71247cc72d4b1daf411a13ea" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.239911 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-tnnzr" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.684405 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-zmgrb"] Oct 09 10:11:08 crc kubenswrapper[4710]: E1009 10:11:08.685865 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a47dcf-7415-4c37-86ac-ea013f15a06e" containerName="container-00" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.685879 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a47dcf-7415-4c37-86ac-ea013f15a06e" containerName="container-00" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.686049 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a47dcf-7415-4c37-86ac-ea013f15a06e" containerName="container-00" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.686579 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.789461 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjdw7\" (UniqueName: \"kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.789503 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.823593 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a47dcf-7415-4c37-86ac-ea013f15a06e" path="/var/lib/kubelet/pods/48a47dcf-7415-4c37-86ac-ea013f15a06e/volumes" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.891338 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjdw7\" (UniqueName: \"kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.891618 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.891762 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:08 crc kubenswrapper[4710]: I1009 10:11:08.913884 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjdw7\" (UniqueName: \"kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7\") pod \"crc-debug-zmgrb\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " 
pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:09 crc kubenswrapper[4710]: I1009 10:11:09.006372 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:09 crc kubenswrapper[4710]: I1009 10:11:09.248077 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" event={"ID":"5d87ce11-925f-49f1-9b82-094352802fd0","Type":"ContainerStarted","Data":"8e9f5579acd4468c038780321ab9a46924d5e6939752202d244a735d82370aff"} Oct 09 10:11:09 crc kubenswrapper[4710]: I1009 10:11:09.248378 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" event={"ID":"5d87ce11-925f-49f1-9b82-094352802fd0","Type":"ContainerStarted","Data":"1ca9bfab9368a0f8b3b38aade448377503c6755f1e12995556c0c68319e01bc2"} Oct 09 10:11:09 crc kubenswrapper[4710]: I1009 10:11:09.263382 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" podStartSLOduration=1.263368214 podStartE2EDuration="1.263368214s" podCreationTimestamp="2025-10-09 10:11:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:11:09.259071201 +0000 UTC m=+3992.749179598" watchObservedRunningTime="2025-10-09 10:11:09.263368214 +0000 UTC m=+3992.753476611" Oct 09 10:11:10 crc kubenswrapper[4710]: I1009 10:11:10.257780 4710 generic.go:334] "Generic (PLEG): container finished" podID="5d87ce11-925f-49f1-9b82-094352802fd0" containerID="8e9f5579acd4468c038780321ab9a46924d5e6939752202d244a735d82370aff" exitCode=0 Oct 09 10:11:10 crc kubenswrapper[4710]: I1009 10:11:10.257861 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" event={"ID":"5d87ce11-925f-49f1-9b82-094352802fd0","Type":"ContainerDied","Data":"8e9f5579acd4468c038780321ab9a46924d5e6939752202d244a735d82370aff"} Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.349074 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.386577 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-zmgrb"] Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.398888 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-zmgrb"] Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.546995 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host\") pod \"5d87ce11-925f-49f1-9b82-094352802fd0\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.547092 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host" (OuterVolumeSpecName: "host") pod "5d87ce11-925f-49f1-9b82-094352802fd0" (UID: "5d87ce11-925f-49f1-9b82-094352802fd0"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.547313 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjdw7\" (UniqueName: \"kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7\") pod \"5d87ce11-925f-49f1-9b82-094352802fd0\" (UID: \"5d87ce11-925f-49f1-9b82-094352802fd0\") " Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.548411 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5d87ce11-925f-49f1-9b82-094352802fd0-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.554670 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7" (OuterVolumeSpecName: "kube-api-access-qjdw7") pod "5d87ce11-925f-49f1-9b82-094352802fd0" (UID: "5d87ce11-925f-49f1-9b82-094352802fd0"). InnerVolumeSpecName "kube-api-access-qjdw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:11:11 crc kubenswrapper[4710]: I1009 10:11:11.650737 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjdw7\" (UniqueName: \"kubernetes.io/projected/5d87ce11-925f-49f1-9b82-094352802fd0-kube-api-access-qjdw7\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.273265 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ca9bfab9368a0f8b3b38aade448377503c6755f1e12995556c0c68319e01bc2" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.273326 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-zmgrb" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.589400 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-bs4hx"] Oct 09 10:11:12 crc kubenswrapper[4710]: E1009 10:11:12.589727 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d87ce11-925f-49f1-9b82-094352802fd0" containerName="container-00" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.589740 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d87ce11-925f-49f1-9b82-094352802fd0" containerName="container-00" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.589910 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d87ce11-925f-49f1-9b82-094352802fd0" containerName="container-00" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.590422 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.670113 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.670417 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p9jf\" (UniqueName: \"kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.771474 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.771539 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p9jf\" (UniqueName: \"kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.771593 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.787139 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p9jf\" (UniqueName: \"kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf\") pod \"crc-debug-bs4hx\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.823650 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d87ce11-925f-49f1-9b82-094352802fd0" path="/var/lib/kubelet/pods/5d87ce11-925f-49f1-9b82-094352802fd0/volumes" Oct 09 10:11:12 crc kubenswrapper[4710]: I1009 10:11:12.904226 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:13 crc kubenswrapper[4710]: I1009 10:11:13.282679 4710 generic.go:334] "Generic (PLEG): container finished" podID="d948a8d2-82e1-4b00-b8a3-bd07474bba15" containerID="e24a483c3d4009b45a3a5de823883db1b7f253c46e4667d6089e76d02e38bafa" exitCode=0 Oct 09 10:11:13 crc kubenswrapper[4710]: I1009 10:11:13.282724 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" event={"ID":"d948a8d2-82e1-4b00-b8a3-bd07474bba15","Type":"ContainerDied","Data":"e24a483c3d4009b45a3a5de823883db1b7f253c46e4667d6089e76d02e38bafa"} Oct 09 10:11:13 crc kubenswrapper[4710]: I1009 10:11:13.282974 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" event={"ID":"d948a8d2-82e1-4b00-b8a3-bd07474bba15","Type":"ContainerStarted","Data":"3097d6584a105a8797679455345e493121b370a8a4a844f3301f29a432895d65"} Oct 09 10:11:13 crc kubenswrapper[4710]: I1009 10:11:13.318569 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-bs4hx"] Oct 09 10:11:13 crc kubenswrapper[4710]: I1009 10:11:13.327767 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bvn9v/crc-debug-bs4hx"] Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.554087 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.701343 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p9jf\" (UniqueName: \"kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf\") pod \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.701538 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host\") pod \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\" (UID: \"d948a8d2-82e1-4b00-b8a3-bd07474bba15\") " Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.701832 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host" (OuterVolumeSpecName: "host") pod "d948a8d2-82e1-4b00-b8a3-bd07474bba15" (UID: "d948a8d2-82e1-4b00-b8a3-bd07474bba15"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.702854 4710 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d948a8d2-82e1-4b00-b8a3-bd07474bba15-host\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.707503 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf" (OuterVolumeSpecName: "kube-api-access-5p9jf") pod "d948a8d2-82e1-4b00-b8a3-bd07474bba15" (UID: "d948a8d2-82e1-4b00-b8a3-bd07474bba15"). InnerVolumeSpecName "kube-api-access-5p9jf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.804771 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p9jf\" (UniqueName: \"kubernetes.io/projected/d948a8d2-82e1-4b00-b8a3-bd07474bba15-kube-api-access-5p9jf\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:14 crc kubenswrapper[4710]: I1009 10:11:14.823608 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d948a8d2-82e1-4b00-b8a3-bd07474bba15" path="/var/lib/kubelet/pods/d948a8d2-82e1-4b00-b8a3-bd07474bba15/volumes" Oct 09 10:11:15 crc kubenswrapper[4710]: I1009 10:11:15.299440 4710 scope.go:117] "RemoveContainer" containerID="e24a483c3d4009b45a3a5de823883db1b7f253c46e4667d6089e76d02e38bafa" Oct 09 10:11:15 crc kubenswrapper[4710]: I1009 10:11:15.299580 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/crc-debug-bs4hx" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.126042 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:19 crc kubenswrapper[4710]: E1009 10:11:19.126584 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d948a8d2-82e1-4b00-b8a3-bd07474bba15" containerName="container-00" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.126597 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="d948a8d2-82e1-4b00-b8a3-bd07474bba15" containerName="container-00" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.126744 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="d948a8d2-82e1-4b00-b8a3-bd07474bba15" containerName="container-00" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.127855 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.139144 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.202189 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.202263 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.202350 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wvdf\" (UniqueName: \"kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.303777 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.303859 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.303976 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wvdf\" (UniqueName: \"kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.304274 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.304291 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.319024 4710 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8wvdf\" (UniqueName: \"kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf\") pod \"community-operators-dmzn9\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.443011 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:19 crc kubenswrapper[4710]: I1009 10:11:19.902942 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:19 crc kubenswrapper[4710]: W1009 10:11:19.909845 4710 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod674b5100_5dbd_46b7_b857_77bb2c1681ab.slice/crio-944fe801d07f6b76056b61ff47bf54a1582eab9afd93b3cb74645892ef189df6 WatchSource:0}: Error finding container 944fe801d07f6b76056b61ff47bf54a1582eab9afd93b3cb74645892ef189df6: Status 404 returned error can't find the container with id 944fe801d07f6b76056b61ff47bf54a1582eab9afd93b3cb74645892ef189df6 Oct 09 10:11:20 crc kubenswrapper[4710]: I1009 10:11:20.339672 4710 generic.go:334] "Generic (PLEG): container finished" podID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerID="7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79" exitCode=0 Oct 09 10:11:20 crc kubenswrapper[4710]: I1009 10:11:20.339717 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerDied","Data":"7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79"} Oct 09 10:11:20 crc kubenswrapper[4710]: I1009 10:11:20.339885 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerStarted","Data":"944fe801d07f6b76056b61ff47bf54a1582eab9afd93b3cb74645892ef189df6"} Oct 09 10:11:21 crc kubenswrapper[4710]: I1009 10:11:21.352500 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerStarted","Data":"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d"} Oct 09 10:11:22 crc kubenswrapper[4710]: I1009 10:11:22.370519 4710 generic.go:334] "Generic (PLEG): container finished" podID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerID="93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d" exitCode=0 Oct 09 10:11:22 crc kubenswrapper[4710]: I1009 10:11:22.370619 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerDied","Data":"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d"} Oct 09 10:11:24 crc kubenswrapper[4710]: I1009 10:11:24.386394 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerStarted","Data":"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd"} Oct 09 10:11:24 crc kubenswrapper[4710]: I1009 10:11:24.399862 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dmzn9" 
podStartSLOduration=2.785170744 podStartE2EDuration="5.399848055s" podCreationTimestamp="2025-10-09 10:11:19 +0000 UTC" firstStartedPulling="2025-10-09 10:11:20.341385498 +0000 UTC m=+4003.831493895" lastFinishedPulling="2025-10-09 10:11:22.956062809 +0000 UTC m=+4006.446171206" observedRunningTime="2025-10-09 10:11:24.398065885 +0000 UTC m=+4007.888174271" watchObservedRunningTime="2025-10-09 10:11:24.399848055 +0000 UTC m=+4007.889956452" Oct 09 10:11:29 crc kubenswrapper[4710]: I1009 10:11:29.443505 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:29 crc kubenswrapper[4710]: I1009 10:11:29.443869 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:29 crc kubenswrapper[4710]: I1009 10:11:29.482768 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:30 crc kubenswrapper[4710]: I1009 10:11:30.622944 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:30 crc kubenswrapper[4710]: I1009 10:11:30.660772 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:32 crc kubenswrapper[4710]: I1009 10:11:32.433308 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dmzn9" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="registry-server" containerID="cri-o://6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd" gracePeriod=2 Oct 09 10:11:32 crc kubenswrapper[4710]: I1009 10:11:32.979096 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.039265 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content\") pod \"674b5100-5dbd-46b7-b857-77bb2c1681ab\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.039488 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities\") pod \"674b5100-5dbd-46b7-b857-77bb2c1681ab\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.039523 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wvdf\" (UniqueName: \"kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf\") pod \"674b5100-5dbd-46b7-b857-77bb2c1681ab\" (UID: \"674b5100-5dbd-46b7-b857-77bb2c1681ab\") " Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.041365 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities" (OuterVolumeSpecName: "utilities") pod "674b5100-5dbd-46b7-b857-77bb2c1681ab" (UID: "674b5100-5dbd-46b7-b857-77bb2c1681ab"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.045848 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf" (OuterVolumeSpecName: "kube-api-access-8wvdf") pod "674b5100-5dbd-46b7-b857-77bb2c1681ab" (UID: "674b5100-5dbd-46b7-b857-77bb2c1681ab"). InnerVolumeSpecName "kube-api-access-8wvdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.077916 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "674b5100-5dbd-46b7-b857-77bb2c1681ab" (UID: "674b5100-5dbd-46b7-b857-77bb2c1681ab"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.141479 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.141588 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wvdf\" (UniqueName: \"kubernetes.io/projected/674b5100-5dbd-46b7-b857-77bb2c1681ab-kube-api-access-8wvdf\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.141641 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/674b5100-5dbd-46b7-b857-77bb2c1681ab-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.440709 4710 generic.go:334] "Generic (PLEG): container finished" podID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerID="6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd" exitCode=0 Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.440744 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerDied","Data":"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd"} Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.440768 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dmzn9" event={"ID":"674b5100-5dbd-46b7-b857-77bb2c1681ab","Type":"ContainerDied","Data":"944fe801d07f6b76056b61ff47bf54a1582eab9afd93b3cb74645892ef189df6"} Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.440786 4710 scope.go:117] "RemoveContainer" containerID="6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.440790 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dmzn9" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.455227 4710 scope.go:117] "RemoveContainer" containerID="93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.463855 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.470556 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dmzn9"] Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.481041 4710 scope.go:117] "RemoveContainer" containerID="7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.505840 4710 scope.go:117] "RemoveContainer" containerID="6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd" Oct 09 10:11:33 crc kubenswrapper[4710]: E1009 10:11:33.506212 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd\": container with ID starting with 6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd not found: ID does not exist" containerID="6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.506237 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd"} err="failed to get container status \"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd\": rpc error: code = NotFound desc = could not find container \"6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd\": container with ID starting with 6eedacb90e05a0ee5b20eb5f154904eea4065099f8d56ca610478de43014c6dd not found: ID does not exist" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.506257 4710 scope.go:117] "RemoveContainer" containerID="93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d" Oct 09 10:11:33 crc kubenswrapper[4710]: E1009 10:11:33.506558 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d\": container with ID starting with 93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d not found: ID does not exist" containerID="93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.506640 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d"} err="failed to get container status \"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d\": rpc error: code = NotFound desc = could not find container \"93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d\": container with ID starting with 93a0505a835f01282c088216e82fd4e9208d9e59d184f6cd0ec0864fdadf233d not found: ID does not exist" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.506709 4710 scope.go:117] "RemoveContainer" containerID="7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79" Oct 09 10:11:33 crc kubenswrapper[4710]: E1009 10:11:33.507100 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79\": container with ID starting with 7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79 not found: ID does not exist" containerID="7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79" Oct 09 10:11:33 crc kubenswrapper[4710]: I1009 10:11:33.507134 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79"} err="failed to get container status \"7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79\": rpc error: code = NotFound desc = could not find container \"7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79\": container with ID starting with 7bfeadfc00be2cc8b9a913ef00f7f303b29ccc2997e4a62f11f2bbb6fb2f7e79 not found: ID does not exist" Oct 09 10:11:34 crc kubenswrapper[4710]: I1009 10:11:34.823610 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" path="/var/lib/kubelet/pods/674b5100-5dbd-46b7-b857-77bb2c1681ab/volumes" Oct 09 10:11:46 crc kubenswrapper[4710]: I1009 10:11:46.244118 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7c69bd8f44-7wmj7_d11eb962-d716-4dcf-9ec6-f82e6969640f/barbican-api/0.log" Oct 09 10:11:46 crc kubenswrapper[4710]: I1009 10:11:46.355865 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7c69bd8f44-7wmj7_d11eb962-d716-4dcf-9ec6-f82e6969640f/barbican-api-log/0.log" Oct 09 10:11:46 crc kubenswrapper[4710]: I1009 10:11:46.635634 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b5698f678-pqrnp_5a03d872-b139-414d-a62f-953c23fb01a6/barbican-keystone-listener/0.log" Oct 09 10:11:46 crc kubenswrapper[4710]: I1009 10:11:46.759598 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6b5698f678-pqrnp_5a03d872-b139-414d-a62f-953c23fb01a6/barbican-keystone-listener-log/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.058545 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-768bd74b7c-lmgpx_c369359e-6e4c-478b-8ef5-0ebd384acbd8/barbican-worker/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.179569 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-768bd74b7c-lmgpx_c369359e-6e4c-478b-8ef5-0ebd384acbd8/barbican-worker-log/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.318713 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-pg2dc_1d4d6d90-9993-4a75-8ea7-e6d488a370b0/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.436064 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/ceilometer-central-agent/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.524077 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/ceilometer-notification-agent/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.562901 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/proxy-httpd/0.log" Oct 09 
10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.645334 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c38d2c59-6c38-4516-bc9c-793e554b04c6/sg-core/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.777839 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-edpm-deployment-openstack-edpm-ipam-jwvm2_18a1b835-afd2-4ceb-ad50-156b24a80601/ceph-client-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:47 crc kubenswrapper[4710]: I1009 10:11:47.908748 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dl4jk_f25f8196-f0c2-4299-8488-0538f69a70a0/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.068882 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1/cinder-api/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.149418 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_cdbcdc08-d8aa-4db7-9d86-d103f6b7e8b1/cinder-api-log/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.365021 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_773237c7-043e-4e8c-a646-6a24ab6cf3d5/probe/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.456946 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_773237c7-043e-4e8c-a646-6a24ab6cf3d5/cinder-backup/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.617726 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9883d700-efac-4450-81db-b1faf06dc645/cinder-scheduler/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.710178 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9883d700-efac-4450-81db-b1faf06dc645/probe/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.879913 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9c352150-914d-40e6-8eb2-ecbf97b33bbc/cinder-volume/0.log" Oct 09 10:11:48 crc kubenswrapper[4710]: I1009 10:11:48.909126 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_9c352150-914d-40e6-8eb2-ecbf97b33bbc/probe/0.log" Oct 09 10:11:49 crc kubenswrapper[4710]: I1009 10:11:49.034481 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-l7p5p_fadd281b-1db6-4170-8ddd-12e4b65a8e5a/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:49 crc kubenswrapper[4710]: I1009 10:11:49.178656 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-kt7hb_5a884cb4-f253-4c96-9e29-5e60aff6f144/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:49 crc kubenswrapper[4710]: I1009 10:11:49.819041 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/init/0.log" Oct 09 10:11:49 crc kubenswrapper[4710]: I1009 10:11:49.942764 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/init/0.log" Oct 09 10:11:50 crc kubenswrapper[4710]: I1009 10:11:50.280005 4710 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_glance-default-external-api-0_353d2111-74ef-4b2e-b17d-5e0672f1a33d/glance-httpd/0.log" Oct 09 10:11:50 crc kubenswrapper[4710]: I1009 10:11:50.468614 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_353d2111-74ef-4b2e-b17d-5e0672f1a33d/glance-log/0.log" Oct 09 10:11:51 crc kubenswrapper[4710]: I1009 10:11:51.052063 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_48c52c4c-a94f-4044-bbbe-9c8e935f1a9c/glance-httpd/0.log" Oct 09 10:11:51 crc kubenswrapper[4710]: I1009 10:11:51.199372 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_48c52c4c-a94f-4044-bbbe-9c8e935f1a9c/glance-log/0.log" Oct 09 10:11:51 crc kubenswrapper[4710]: I1009 10:11:51.470738 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-67d94d7dc8-fvmp7_13eb4841-8d3a-4ef6-a2da-656bab482ab4/horizon/0.log" Oct 09 10:11:51 crc kubenswrapper[4710]: I1009 10:11:51.681586 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-67d94d7dc8-fvmp7_13eb4841-8d3a-4ef6-a2da-656bab482ab4/horizon-log/0.log" Oct 09 10:11:51 crc kubenswrapper[4710]: I1009 10:11:51.950785 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-6c5qh_77eea3d2-8a1b-4abe-bdcc-4f36d8b22fdf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:52 crc kubenswrapper[4710]: I1009 10:11:52.162144 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-wtmjh_95b718d5-c979-4a2f-82a5-e0915b769b7a/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:52 crc kubenswrapper[4710]: I1009 10:11:52.633299 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7746794c77-ljwdn_aa709644-7781-443c-b0e3-d5936fff1dde/keystone-api/0.log" Oct 09 10:11:52 crc kubenswrapper[4710]: I1009 10:11:52.739375 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29333401-56psm_208c883f-3974-4994-802b-6c3bcc416326/keystone-cron/0.log" Oct 09 10:11:52 crc kubenswrapper[4710]: I1009 10:11:52.954806 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5f2955ba-a1b4-4cad-8c8b-35d74d914474/kube-state-metrics/0.log" Oct 09 10:11:53 crc kubenswrapper[4710]: I1009 10:11:53.016163 4710 scope.go:117] "RemoveContainer" containerID="1ed176fab1467c9757de18ad57d8a13460a2576db7123184b7fe8459b344f607" Oct 09 10:11:53 crc kubenswrapper[4710]: I1009 10:11:53.045642 4710 scope.go:117] "RemoveContainer" containerID="a78ebf55ae7807204183385a1c25d8cdb1fae2a7e92c6cf88ed0f39329166648" Oct 09 10:11:53 crc kubenswrapper[4710]: I1009 10:11:53.482819 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-l7mlq_3197a42e-d565-4f24-9115-990a46dfc659/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:53 crc kubenswrapper[4710]: I1009 10:11:53.744043 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_b107b875-5e01-45ce-8702-dcc78cd19193/manila-api-log/0.log" Oct 09 10:11:53 crc kubenswrapper[4710]: I1009 10:11:53.912104 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7887c4559f-7d5cl_892059c2-6b0b-48d9-ba51-fb86b0856c4a/dnsmasq-dns/0.log" Oct 09 10:11:53 crc 
kubenswrapper[4710]: I1009 10:11:53.956590 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_b107b875-5e01-45ce-8702-dcc78cd19193/manila-api/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.096849 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f91da5bb-5573-4165-8592-6dc828d64596/manila-scheduler/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.131685 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_f91da5bb-5573-4165-8592-6dc828d64596/probe/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.242537 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_3efa48f2-2c46-4963-aed1-aa57cb9ada01/manila-share/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.272539 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_3efa48f2-2c46-4963-aed1-aa57cb9ada01/probe/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.592773 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8dfcdb5-zd6wv_ff8c79d6-681e-4c93-b80e-15c8ff06d6af/neutron-api/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.667860 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-bf8dfcdb5-zd6wv_ff8c79d6-681e-4c93-b80e-15c8ff06d6af/neutron-httpd/0.log" Oct 09 10:11:54 crc kubenswrapper[4710]: I1009 10:11:54.867986 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-t7bxl_7688fa72-f35a-4dd1-a1de-1eda8bf5ff77/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:55 crc kubenswrapper[4710]: I1009 10:11:55.391417 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_61eb69de-ee17-4084-95dc-0192a6d4a0d4/nova-api-log/0.log" Oct 09 10:11:55 crc kubenswrapper[4710]: I1009 10:11:55.612777 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_79cbbde5-3252-4efd-a000-95ec002a56bb/nova-cell0-conductor-conductor/0.log" Oct 09 10:11:55 crc kubenswrapper[4710]: I1009 10:11:55.741187 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_61eb69de-ee17-4084-95dc-0192a6d4a0d4/nova-api-api/0.log" Oct 09 10:11:55 crc kubenswrapper[4710]: I1009 10:11:55.953910 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_d7541f91-8a95-4a0e-9cdd-95252f38710b/nova-cell1-conductor-conductor/0.log" Oct 09 10:11:56 crc kubenswrapper[4710]: I1009 10:11:56.102886 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_b6bae005-9dec-4a1a-b8ca-3222b1c5f2c4/nova-cell1-novncproxy-novncproxy/0.log" Oct 09 10:11:56 crc kubenswrapper[4710]: I1009 10:11:56.297670 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-custom-ceph-edpm-deployment-openstack-edpm-ipam-6lt4v_f9e2c502-e067-49c7-b805-adc3d054f0cf/nova-custom-ceph-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:11:56 crc kubenswrapper[4710]: I1009 10:11:56.457383 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0a140770-37ca-4b77-8eed-bc3ecfed72db/nova-metadata-log/0.log" Oct 09 10:11:56 crc kubenswrapper[4710]: I1009 10:11:56.965852 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-scheduler-0_e717823b-a1aa-46c0-b1a6-be9ada2d596f/nova-scheduler-scheduler/0.log" Oct 09 10:11:57 crc kubenswrapper[4710]: I1009 10:11:57.157374 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/mysql-bootstrap/0.log" Oct 09 10:11:57 crc kubenswrapper[4710]: I1009 10:11:57.615063 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/mysql-bootstrap/0.log" Oct 09 10:11:57 crc kubenswrapper[4710]: I1009 10:11:57.620113 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_57584eef-cc17-41b8-a307-663a50cbf568/galera/0.log" Oct 09 10:11:57 crc kubenswrapper[4710]: I1009 10:11:57.858481 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/mysql-bootstrap/0.log" Oct 09 10:11:57 crc kubenswrapper[4710]: I1009 10:11:57.915589 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_0a140770-37ca-4b77-8eed-bc3ecfed72db/nova-metadata-metadata/0.log" Oct 09 10:11:58 crc kubenswrapper[4710]: I1009 10:11:58.093894 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/galera/0.log" Oct 09 10:11:58 crc kubenswrapper[4710]: I1009 10:11:58.104589 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_bc62be28-ccca-4b4b-b7d0-afabccec4047/mysql-bootstrap/0.log" Oct 09 10:11:58 crc kubenswrapper[4710]: I1009 10:11:58.299150 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_b7779e6d-a6fb-4d03-8636-0dafb2767cbc/openstackclient/0.log" Oct 09 10:11:58 crc kubenswrapper[4710]: I1009 10:11:58.417818 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ff4n7_8c4f1833-b6b6-4c51-bd5e-0b4cf749e848/ovn-controller/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.047582 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-vcl7d_50c1f1ba-8a7d-482b-841f-591355f9dd44/openstack-network-exporter/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.295699 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server-init/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.573357 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.573941 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovsdb-server-init/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.593521 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-b2pbr_72df4789-6551-4a9f-a8ba-6ed6f43f03f9/ovs-vswitchd/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.848177 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b6b2be0-00ba-434b-9310-32ee9f286c71/openstack-network-exporter/0.log" Oct 09 10:11:59 crc kubenswrapper[4710]: I1009 10:11:59.907321 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-fkddm_30e0d33b-9f4a-4209-ad66-d5f51af8deea/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.007418 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4b6b2be0-00ba-434b-9310-32ee9f286c71/ovn-northd/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.211679 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_958e4ef0-c38c-411d-9893-bac75789df76/openstack-network-exporter/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.274110 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_958e4ef0-c38c-411d-9893-bac75789df76/ovsdbserver-nb/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.451957 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f/openstack-network-exporter/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.461534 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c31e2d17-d49a-4c8c-8c7a-45c49bfe6d8f/ovsdbserver-sb/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.617665 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-685c5ffc88-49dzx_a038f2bb-2364-4a8d-918c-a0776dfa8458/placement-api/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.862536 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/setup-container/0.log" Oct 09 10:12:00 crc kubenswrapper[4710]: I1009 10:12:00.880470 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-685c5ffc88-49dzx_a038f2bb-2364-4a8d-918c-a0776dfa8458/placement-log/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.103797 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/setup-container/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.166168 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/setup-container/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.256734 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_3011d32f-6110-456d-a247-f6298b1d46e3/rabbitmq/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.416542 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/setup-container/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.535363 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_818b872a-e3f5-475f-ac6d-99810ac2f39b/rabbitmq/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.548707 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-z7wbs_cdbaf739-8dd9-457f-97a5-8ddbcff386ea/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:12:01 crc kubenswrapper[4710]: I1009 10:12:01.782869 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-rbshv_0394728a-d605-415c-afcf-5f52e6b3bcac/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:12:01 
crc kubenswrapper[4710]: I1009 10:12:01.859045 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-296ks_3015dc52-bcf5-444b-9200-82a3f79b0fcb/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:12:02 crc kubenswrapper[4710]: I1009 10:12:02.074763 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-6kwm8_9910b475-f4c5-49db-b431-b7214908cf77/ssh-known-hosts-edpm-deployment/0.log" Oct 09 10:12:02 crc kubenswrapper[4710]: I1009 10:12:02.266294 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_36f8a9d9-4342-4b4f-921b-a0acbe1215db/tempest-tests-tempest-tests-runner/0.log" Oct 09 10:12:02 crc kubenswrapper[4710]: I1009 10:12:02.424931 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b4ec062d-c82a-4a34-801c-4a290d15c32e/test-operator-logs-container/0.log" Oct 09 10:12:02 crc kubenswrapper[4710]: I1009 10:12:02.578568 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-kmfnc_08cf9289-9c53-4831-9bf9-3e0b70a457d5/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 09 10:12:05 crc kubenswrapper[4710]: I1009 10:12:05.546349 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:12:05 crc kubenswrapper[4710]: I1009 10:12:05.548720 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:12:14 crc kubenswrapper[4710]: I1009 10:12:14.327316 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_163b8a88-dc31-4540-a39b-bfecc81ce8aa/memcached/0.log" Oct 09 10:12:29 crc kubenswrapper[4710]: I1009 10:12:29.594064 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:12:29 crc kubenswrapper[4710]: I1009 10:12:29.879621 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" Oct 09 10:12:29 crc kubenswrapper[4710]: I1009 10:12:29.889559 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:12:29 crc kubenswrapper[4710]: I1009 10:12:29.945569 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.099136 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/pull/0.log" 
Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.130553 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/util/0.log" Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.194327 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_184bca519f21fd0fd55ec08aa4c93472bde537c6b6fd14be86cecc7c99xxtvt_cfbeb551-9915-4071-a67c-5a88443100f1/extract/0.log" Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.382297 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-mvjkw_8437ff8a-3892-464b-963b-d5afaf9599dc/manager/0.log" Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.409916 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-658bdf4b74-mvjkw_8437ff8a-3892-464b-963b-d5afaf9599dc/kube-rbac-proxy/0.log" Oct 09 10:12:30 crc kubenswrapper[4710]: I1009 10:12:30.449832 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-7t5cw_26ad6e31-7002-4043-a971-aa507f4118bf/kube-rbac-proxy/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.138200 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7b7fb68549-7t5cw_26ad6e31-7002-4043-a971-aa507f4118bf/manager/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.140191 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-qq9g6_5fc82f31-455f-4960-8538-5315e1a3a09a/kube-rbac-proxy/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.197380 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-85d5d9dd78-qq9g6_5fc82f31-455f-4960-8538-5315e1a3a09a/manager/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.370719 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-4szsc_10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6/manager/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.396384 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84b9b84486-4szsc_10de6ce0-7aa9-471d-8b90-c44fb9bb0ab6/kube-rbac-proxy/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.578264 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-qhvrk_1438d002-6055-453b-8a7a-c83888b37429/kube-rbac-proxy/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.588218 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-858f76bbdd-qhvrk_1438d002-6055-453b-8a7a-c83888b37429/manager/0.log" Oct 09 10:12:31 crc kubenswrapper[4710]: I1009 10:12:31.647500 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-svtkg_3b555e01-0210-431d-83ab-97ebcc53a68b/kube-rbac-proxy/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.343420 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7ffbcb7588-svtkg_3b555e01-0210-431d-83ab-97ebcc53a68b/manager/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.373073 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-m95bh_84815f80-0c57-4246-abe3-7c54bd77d1c1/kube-rbac-proxy/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.470549 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-656bcbd775-m95bh_84815f80-0c57-4246-abe3-7c54bd77d1c1/manager/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.638321 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-dhvdq_21d3fd5d-1f17-45d0-bf73-59fdc7211820/manager/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.736514 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-9c5c78d49-dhvdq_21d3fd5d-1f17-45d0-bf73-59fdc7211820/kube-rbac-proxy/0.log" Oct 09 10:12:32 crc kubenswrapper[4710]: I1009 10:12:32.890537 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-jb2h2_f78f287b-b34d-40c0-ad99-caaf90bc2ae7/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.204651 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2htjv_fa97dde8-95a7-4c4b-820d-d889545d79d5/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.231987 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55b6b7c7b8-jb2h2_f78f287b-b34d-40c0-ad99-caaf90bc2ae7/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.236069 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5f67fbc655-2htjv_fa97dde8-95a7-4c4b-820d-d889545d79d5/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.359875 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-6lbkw_72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.481221 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-f9fb45f8f-6lbkw_72cf445d-90ac-4c98-a1df-3a3a4c2f0c7a/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.512177 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-d7sqz_d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.522519 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-79d585cb66-d7sqz_d6f9aa15-3eb4-403a-b4fc-4af18c14d4bf/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.729008 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-sd5xk_51113172-27cd-47a3-8bc2-b751cb1654f7/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.767611 4710 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-nmmkl_addc94b4-bdbb-4a05-993d-5a7ac2bb3e19/kube-rbac-proxy/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.819153 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5df598886f-sd5xk_51113172-27cd-47a3-8bc2-b751cb1654f7/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.909343 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-69fdcfc5f5-nmmkl_addc94b4-bdbb-4a05-993d-5a7ac2bb3e19/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.981240 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj_cce3973b-b375-4ea1-b907-0f46e330dfae/manager/0.log" Oct 09 10:12:33 crc kubenswrapper[4710]: I1009 10:12:33.986497 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-677c5f5bffbgjzj_cce3973b-b375-4ea1-b907-0f46e330dfae/kube-rbac-proxy/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.094362 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7d6957655c-6trjl_eda25b03-4fb3-4ace-803c-1d1800196995/kube-rbac-proxy/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.238279 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-848c57cb5c-lrs49_f23bbd5e-3d87-4396-aad0-9455c284fbf8/kube-rbac-proxy/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.461367 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-848c57cb5c-lrs49_f23bbd5e-3d87-4396-aad0-9455c284fbf8/operator/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.636650 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-nlptw_ea70cec4-b9bc-48b8-8871-034a6d5b392a/registry-server/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.685531 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79db49b9fb-vnvl6_4a08f5f1-bab4-425c-b81c-b48f2d4a186b/kube-rbac-proxy/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.888790 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-79db49b9fb-vnvl6_4a08f5f1-bab4-425c-b81c-b48f2d4a186b/manager/0.log" Oct 09 10:12:34 crc kubenswrapper[4710]: I1009 10:12:34.996557 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-lghdj_2e6376a2-edb7-4958-b3b3-3a6773782349/kube-rbac-proxy/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.065093 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-68b6c87b68-lghdj_2e6376a2-edb7-4958-b3b3-3a6773782349/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.141354 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-292ft_eb73b966-fd38-499c-a018-d28ad9acda92/operator/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 
10:12:35.327708 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7d6957655c-6trjl_eda25b03-4fb3-4ace-803c-1d1800196995/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.367662 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-rh8n6_9f1f2915-bd9f-496f-a513-e1fd022ee463/kube-rbac-proxy/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.411666 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-db6d7f97b-rh8n6_9f1f2915-bd9f-496f-a513-e1fd022ee463/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.483571 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-t69gh_2c55fbcc-5995-4a59-b8ae-dc8be7411fa8/kube-rbac-proxy/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.546026 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.546079 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.601924 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-67cfc6749b-t69gh_2c55fbcc-5995-4a59-b8ae-dc8be7411fa8/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.665587 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-fzwm9_b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3/kube-rbac-proxy/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.720689 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5458f77c4-fzwm9_b8dd2f89-b87d-4669-8c6e-7c8035b6fcd3/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.825613 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-759pl_94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e/manager/0.log" Oct 09 10:12:35 crc kubenswrapper[4710]: I1009 10:12:35.836634 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7f554bff7b-759pl_94e1d5bc-92ce-4e9a-b0c7-bef14a881f9e/kube-rbac-proxy/0.log" Oct 09 10:12:50 crc kubenswrapper[4710]: I1009 10:12:50.488905 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-fkhwq_51378278-0202-4be1-96a8-28f4c81a6aae/control-plane-machine-set-operator/0.log" Oct 09 10:12:50 crc kubenswrapper[4710]: I1009 10:12:50.613942 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k94j_c3193888-6214-44cb-a0bc-0091046b80c2/kube-rbac-proxy/0.log" Oct 09 10:12:50 crc 
kubenswrapper[4710]: I1009 10:12:50.668255 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-2k94j_c3193888-6214-44cb-a0bc-0091046b80c2/machine-api-operator/0.log" Oct 09 10:13:02 crc kubenswrapper[4710]: I1009 10:13:02.432364 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-7mwtc_4a0738a0-347d-48d6-a47e-52e13d52664d/cert-manager-controller/0.log" Oct 09 10:13:02 crc kubenswrapper[4710]: I1009 10:13:02.550647 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-hgq5g_878c656c-1bab-4b97-8267-910f8890946a/cert-manager-webhook/0.log" Oct 09 10:13:02 crc kubenswrapper[4710]: I1009 10:13:02.599518 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-dx5ll_1631e6bf-cf27-4c97-ab40-8b2170648070/cert-manager-cainjector/0.log" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.545998 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.546591 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.546634 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.547618 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.547677 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401" gracePeriod=600 Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.875981 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:05 crc kubenswrapper[4710]: E1009 10:13:05.876866 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="extract-content" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.876890 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="extract-content" Oct 09 10:13:05 crc kubenswrapper[4710]: E1009 10:13:05.876918 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="extract-utilities" Oct 09 10:13:05 crc kubenswrapper[4710]: 
I1009 10:13:05.876925 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="extract-utilities" Oct 09 10:13:05 crc kubenswrapper[4710]: E1009 10:13:05.876938 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="registry-server" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.876943 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="registry-server" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.888365 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="674b5100-5dbd-46b7-b857-77bb2c1681ab" containerName="registry-server" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.889492 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.889580 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.893836 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pfq2\" (UniqueName: \"kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.893906 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.894045 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.996340 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.996806 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.996944 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.997104 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pfq2\" (UniqueName: \"kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:05 crc kubenswrapper[4710]: I1009 10:13:05.997268 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.013627 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pfq2\" (UniqueName: \"kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2\") pod \"redhat-operators-m29f8\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.207541 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.310515 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401" exitCode=0 Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.310559 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401"} Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.310584 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerStarted","Data":"6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"} Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.310600 4710 scope.go:117] "RemoveContainer" containerID="a12a50b36fb4ed0373ede7c18d29489b77177ff5febff09d5a471f763fd6da5d" Oct 09 10:13:06 crc kubenswrapper[4710]: I1009 10:13:06.674362 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:07 crc kubenswrapper[4710]: I1009 10:13:07.317706 4710 generic.go:334] "Generic (PLEG): container finished" podID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerID="fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb" exitCode=0 Oct 09 10:13:07 crc kubenswrapper[4710]: I1009 10:13:07.317880 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerDied","Data":"fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb"} Oct 09 10:13:07 crc kubenswrapper[4710]: I1009 10:13:07.317924 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerStarted","Data":"8b30106d1b28937d795956f657836321adb5416650b82510302d1ff5a5e031d0"} Oct 09 10:13:09 crc kubenswrapper[4710]: I1009 10:13:09.335595 4710 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerStarted","Data":"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab"} Oct 09 10:13:11 crc kubenswrapper[4710]: I1009 10:13:11.350333 4710 generic.go:334] "Generic (PLEG): container finished" podID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerID="1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab" exitCode=0 Oct 09 10:13:11 crc kubenswrapper[4710]: I1009 10:13:11.350389 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerDied","Data":"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab"} Oct 09 10:13:12 crc kubenswrapper[4710]: I1009 10:13:12.359219 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerStarted","Data":"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb"} Oct 09 10:13:12 crc kubenswrapper[4710]: I1009 10:13:12.373490 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m29f8" podStartSLOduration=2.798296915 podStartE2EDuration="7.373476049s" podCreationTimestamp="2025-10-09 10:13:05 +0000 UTC" firstStartedPulling="2025-10-09 10:13:07.319664837 +0000 UTC m=+4110.809773235" lastFinishedPulling="2025-10-09 10:13:11.894843972 +0000 UTC m=+4115.384952369" observedRunningTime="2025-10-09 10:13:12.372299781 +0000 UTC m=+4115.862408178" watchObservedRunningTime="2025-10-09 10:13:12.373476049 +0000 UTC m=+4115.863584446" Oct 09 10:13:13 crc kubenswrapper[4710]: I1009 10:13:13.465697 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-ptzlb_dd0f6b60-657a-4d34-a175-74f88f730669/nmstate-console-plugin/0.log" Oct 09 10:13:13 crc kubenswrapper[4710]: I1009 10:13:13.638516 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zcxkk_f174bd18-14b5-495f-8d34-795eca72dc06/nmstate-handler/0.log" Oct 09 10:13:13 crc kubenswrapper[4710]: I1009 10:13:13.752935 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-bcx7j_0899a565-e72a-498c-9071-7b05ccb027bd/kube-rbac-proxy/0.log" Oct 09 10:13:13 crc kubenswrapper[4710]: I1009 10:13:13.758967 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-bcx7j_0899a565-e72a-498c-9071-7b05ccb027bd/nmstate-metrics/0.log" Oct 09 10:13:13 crc kubenswrapper[4710]: I1009 10:13:13.932039 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-c98j9_35322d9f-0b0d-40a7-b13c-7763f5027a59/nmstate-operator/0.log" Oct 09 10:13:14 crc kubenswrapper[4710]: I1009 10:13:14.024309 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-sfv4r_89eda8ab-752d-4dc4-af4a-009431208f96/nmstate-webhook/0.log" Oct 09 10:13:16 crc kubenswrapper[4710]: I1009 10:13:16.208082 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:16 crc kubenswrapper[4710]: I1009 10:13:16.208340 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:16 crc kubenswrapper[4710]: I1009 10:13:16.611520 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.261706 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.304029 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fjp44_74b53642-04a4-4331-806b-c9f84d190746/kube-rbac-proxy/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.304779 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.420498 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-fjp44_74b53642-04a4-4331-806b-c9f84d190746/controller/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.446456 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m29f8" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="registry-server" containerID="cri-o://70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb" gracePeriod=2 Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.532537 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.730712 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.732820 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.748505 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:13:26 crc kubenswrapper[4710]: I1009 10:13:26.776781 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.019963 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.040169 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.048481 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.059329 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.097003 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.185112 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pfq2\" (UniqueName: \"kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2\") pod \"529736b9-6586-4aa2-af0b-03c564bc6dd7\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.185363 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities\") pod \"529736b9-6586-4aa2-af0b-03c564bc6dd7\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.185394 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content\") pod \"529736b9-6586-4aa2-af0b-03c564bc6dd7\" (UID: \"529736b9-6586-4aa2-af0b-03c564bc6dd7\") " Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.186734 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities" (OuterVolumeSpecName: "utilities") pod "529736b9-6586-4aa2-af0b-03c564bc6dd7" (UID: "529736b9-6586-4aa2-af0b-03c564bc6dd7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.224456 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2" (OuterVolumeSpecName: "kube-api-access-6pfq2") pod "529736b9-6586-4aa2-af0b-03c564bc6dd7" (UID: "529736b9-6586-4aa2-af0b-03c564bc6dd7"). InnerVolumeSpecName "kube-api-access-6pfq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.269771 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-frr-files/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.270938 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "529736b9-6586-4aa2-af0b-03c564bc6dd7" (UID: "529736b9-6586-4aa2-af0b-03c564bc6dd7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.288719 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pfq2\" (UniqueName: \"kubernetes.io/projected/529736b9-6586-4aa2-af0b-03c564bc6dd7-kube-api-access-6pfq2\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.288743 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.288769 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/529736b9-6586-4aa2-af0b-03c564bc6dd7-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.345990 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/controller/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.353623 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-reloader/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.355187 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/cp-metrics/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.466634 4710 generic.go:334] "Generic (PLEG): container finished" podID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerID="70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb" exitCode=0 Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.466675 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerDied","Data":"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb"} Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.466701 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m29f8" event={"ID":"529736b9-6586-4aa2-af0b-03c564bc6dd7","Type":"ContainerDied","Data":"8b30106d1b28937d795956f657836321adb5416650b82510302d1ff5a5e031d0"} Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.466721 4710 scope.go:117] "RemoveContainer" containerID="70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.466865 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m29f8" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.498767 4710 scope.go:117] "RemoveContainer" containerID="1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.517494 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.525223 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m29f8"] Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.534566 4710 scope.go:117] "RemoveContainer" containerID="fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.571011 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/kube-rbac-proxy/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.586171 4710 scope.go:117] "RemoveContainer" containerID="70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb" Oct 09 10:13:27 crc kubenswrapper[4710]: E1009 10:13:27.587250 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb\": container with ID starting with 70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb not found: ID does not exist" containerID="70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.587295 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb"} err="failed to get container status \"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb\": rpc error: code = NotFound desc = could not find container \"70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb\": container with ID starting with 70e7484b0249283e02a8793da92423966c4a5fe4346241214cb89394b90da0cb not found: ID does not exist" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.587316 4710 scope.go:117] "RemoveContainer" containerID="1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab" Oct 09 10:13:27 crc kubenswrapper[4710]: E1009 10:13:27.588468 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab\": container with ID starting with 1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab not found: ID does not exist" containerID="1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.588509 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab"} err="failed to get container status \"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab\": rpc error: code = NotFound desc = could not find container \"1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab\": container with ID starting with 1fa55824a425338f47e2a0f146bff0ff70697de74c35058887d800346c469eab not found: ID does not exist" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.588533 4710 scope.go:117] 
"RemoveContainer" containerID="fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb" Oct 09 10:13:27 crc kubenswrapper[4710]: E1009 10:13:27.588818 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb\": container with ID starting with fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb not found: ID does not exist" containerID="fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.588854 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb"} err="failed to get container status \"fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb\": rpc error: code = NotFound desc = could not find container \"fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb\": container with ID starting with fc50dcee9774ef0d6b1cdfda78388d27ee62383baa12ee049e0812b8d72f94bb not found: ID does not exist" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.594739 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/kube-rbac-proxy-frr/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.610858 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/frr-metrics/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.811748 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/reloader/0.log" Oct 09 10:13:27 crc kubenswrapper[4710]: I1009 10:13:27.862146 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-t2fhd_f03c49fa-bc71-4bac-b0bb-e25876b0cef7/frr-k8s-webhook-server/0.log" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.048654 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7cb7c7d4ff-szp6n_95157129-8087-4ba4-9b97-980dc6f6d88d/manager/0.log" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.338499 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-86bdd9545f-gp42b_89f2b083-5809-4a5b-9c55-75bb2c0807a8/webhook-server/0.log" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.356335 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wvbb_f1dc3816-2a79-4e41-8337-bc61c3bbafc8/kube-rbac-proxy/0.log" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.537703 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-mqt2m_aab75afc-7b04-4a39-afd5-3109512334d6/frr/0.log" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.833999 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" path="/var/lib/kubelet/pods/529736b9-6586-4aa2-af0b-03c564bc6dd7/volumes" Oct 09 10:13:28 crc kubenswrapper[4710]: I1009 10:13:28.843812 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-6wvbb_f1dc3816-2a79-4e41-8337-bc61c3bbafc8/speaker/0.log" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.336651 4710 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:35 crc kubenswrapper[4710]: E1009 10:13:35.338621 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="extract-content" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.338711 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="extract-content" Oct 09 10:13:35 crc kubenswrapper[4710]: E1009 10:13:35.338781 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="registry-server" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.338828 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="registry-server" Oct 09 10:13:35 crc kubenswrapper[4710]: E1009 10:13:35.338876 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="extract-utilities" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.338928 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="extract-utilities" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.339155 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="529736b9-6586-4aa2-af0b-03c564bc6dd7" containerName="registry-server" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.340394 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.342744 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.368699 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.368937 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.369017 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tthg\" (UniqueName: \"kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.471215 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.471786 4710 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.472096 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tthg\" (UniqueName: \"kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.473108 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.473385 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.495070 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tthg\" (UniqueName: \"kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg\") pod \"certified-operators-z7vh5\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:35 crc kubenswrapper[4710]: I1009 10:13:35.672011 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:36 crc kubenswrapper[4710]: I1009 10:13:36.168976 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:36 crc kubenswrapper[4710]: I1009 10:13:36.531272 4710 generic.go:334] "Generic (PLEG): container finished" podID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerID="b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628" exitCode=0 Oct 09 10:13:36 crc kubenswrapper[4710]: I1009 10:13:36.531327 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerDied","Data":"b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628"} Oct 09 10:13:36 crc kubenswrapper[4710]: I1009 10:13:36.531352 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerStarted","Data":"28011523eb1532102826ac5b8b66df89716429202e3c19e0413e10f9e4a295b2"} Oct 09 10:13:37 crc kubenswrapper[4710]: I1009 10:13:37.540577 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerStarted","Data":"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad"} Oct 09 10:13:38 crc kubenswrapper[4710]: I1009 10:13:38.548691 4710 generic.go:334] "Generic (PLEG): container finished" podID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerID="f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad" exitCode=0 Oct 09 10:13:38 crc kubenswrapper[4710]: I1009 10:13:38.548856 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerDied","Data":"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad"} Oct 09 10:13:39 crc kubenswrapper[4710]: I1009 10:13:39.556700 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerStarted","Data":"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c"} Oct 09 10:13:39 crc kubenswrapper[4710]: I1009 10:13:39.577784 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z7vh5" podStartSLOduration=2.03226521 podStartE2EDuration="4.577767226s" podCreationTimestamp="2025-10-09 10:13:35 +0000 UTC" firstStartedPulling="2025-10-09 10:13:36.533333397 +0000 UTC m=+4140.023441794" lastFinishedPulling="2025-10-09 10:13:39.078835412 +0000 UTC m=+4142.568943810" observedRunningTime="2025-10-09 10:13:39.572540339 +0000 UTC m=+4143.062648737" watchObservedRunningTime="2025-10-09 10:13:39.577767226 +0000 UTC m=+4143.067875623" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.150989 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.337084 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:13:41 
crc kubenswrapper[4710]: I1009 10:13:41.341387 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.395863 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.536392 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/util/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.572629 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/extract/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.622735 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d282js7_372d2b61-0177-4956-bcab-23bee5bfd490/pull/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.708084 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.881288 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.883700 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:13:41 crc kubenswrapper[4710]: I1009 10:13:41.924949 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.056094 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-content/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.084951 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/extract-utilities/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.335806 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-utilities/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.511692 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-gfskr_f63018af-034e-4f0a-ab7e-fd508ba8bd25/registry-server/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.566681 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-content/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.592108 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-utilities/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.605412 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-content/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.730803 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-utilities/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.749654 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/extract-content/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.778936 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z7vh5_05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/registry-server/0.log" Oct 09 10:13:42 crc kubenswrapper[4710]: I1009 10:13:42.920869 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.052891 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.103583 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.116386 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.299935 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-content/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.318376 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/extract-utilities/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.549277 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.847420 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.853094 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vqq8h_19faa1c0-84d5-4b45-969b-c9524eee6e56/registry-server/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: I1009 10:13:43.872693 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:13:43 crc kubenswrapper[4710]: 
I1009 10:13:43.898892 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.095333 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/pull/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.124937 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/util/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.136014 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835c7nfsp_98d5ef90-e17a-4c38-b665-ad2311e7b3b1/extract/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.140665 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mf25c_0f52acde-7961-4866-8e50-2d6839085e4d/marketplace-operator/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.322717 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.436264 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.450383 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.476154 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.610724 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-utilities/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.691587 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/extract-content/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.729220 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.811969 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rtjsg_b6750787-1707-42c3-9e91-949cfcf33699/registry-server/0.log" Oct 09 10:13:44 crc kubenswrapper[4710]: I1009 10:13:44.921697 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.153923 4710 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.193634 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.353588 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-content/0.log" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.445503 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/extract-utilities/0.log" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.672388 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.673259 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.737235 4710 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:45 crc kubenswrapper[4710]: I1009 10:13:45.840452 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-97w7k_9cbe3f65-19a3-4145-94a8-4434ee92178f/registry-server/0.log" Oct 09 10:13:46 crc kubenswrapper[4710]: I1009 10:13:46.651818 4710 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:46 crc kubenswrapper[4710]: I1009 10:13:46.707830 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:48 crc kubenswrapper[4710]: I1009 10:13:48.625544 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-z7vh5" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="registry-server" containerID="cri-o://11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c" gracePeriod=2 Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.204889 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.283153 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tthg\" (UniqueName: \"kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg\") pod \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.283353 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities\") pod \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.283528 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content\") pod \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\" (UID: \"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f\") " Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.283995 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities" (OuterVolumeSpecName: "utilities") pod "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" (UID: "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.285230 4710 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-utilities\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.295935 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg" (OuterVolumeSpecName: "kube-api-access-5tthg") pod "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" (UID: "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f"). InnerVolumeSpecName "kube-api-access-5tthg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.320406 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" (UID: "05aeeec5-723a-4bb0-b42b-bbb189f8eb9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.387643 4710 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.387676 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tthg\" (UniqueName: \"kubernetes.io/projected/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f-kube-api-access-5tthg\") on node \"crc\" DevicePath \"\"" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.635655 4710 generic.go:334] "Generic (PLEG): container finished" podID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerID="11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c" exitCode=0 Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.635696 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerDied","Data":"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c"} Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.635739 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z7vh5" event={"ID":"05aeeec5-723a-4bb0-b42b-bbb189f8eb9f","Type":"ContainerDied","Data":"28011523eb1532102826ac5b8b66df89716429202e3c19e0413e10f9e4a295b2"} Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.635758 4710 scope.go:117] "RemoveContainer" containerID="11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.635904 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z7vh5" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.674312 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.675856 4710 scope.go:117] "RemoveContainer" containerID="f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.684524 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z7vh5"] Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.712747 4710 scope.go:117] "RemoveContainer" containerID="b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.752568 4710 scope.go:117] "RemoveContainer" containerID="11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c" Oct 09 10:13:49 crc kubenswrapper[4710]: E1009 10:13:49.753045 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c\": container with ID starting with 11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c not found: ID does not exist" containerID="11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.753086 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c"} err="failed to get container status \"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c\": rpc error: code = NotFound desc = could not find container \"11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c\": container with ID starting with 11d7ba0dff345d6da4c57ec43616aa4644be552625843fc04b5ef9889b9c816c not found: ID does not exist" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.753114 4710 scope.go:117] "RemoveContainer" containerID="f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad" Oct 09 10:13:49 crc kubenswrapper[4710]: E1009 10:13:49.753644 4710 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad\": container with ID starting with f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad not found: ID does not exist" containerID="f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.753695 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad"} err="failed to get container status \"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad\": rpc error: code = NotFound desc = could not find container \"f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad\": container with ID starting with f4f263308f983c33d84b96b243a6c8a9d51923baa935b452add943fb29ce86ad not found: ID does not exist" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.753718 4710 scope.go:117] "RemoveContainer" containerID="b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628" Oct 09 10:13:49 crc kubenswrapper[4710]: E1009 10:13:49.754040 4710 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628\": container with ID starting with b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628 not found: ID does not exist" containerID="b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628" Oct 09 10:13:49 crc kubenswrapper[4710]: I1009 10:13:49.754072 4710 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628"} err="failed to get container status \"b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628\": rpc error: code = NotFound desc = could not find container \"b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628\": container with ID starting with b044d3af8bdf10d85ac913b29d566ab0ab67154c4b08b55d8597f26a0cbcc628 not found: ID does not exist" Oct 09 10:13:50 crc kubenswrapper[4710]: I1009 10:13:50.823487 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" path="/var/lib/kubelet/pods/05aeeec5-723a-4bb0-b42b-bbb189f8eb9f/volumes" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.140804 4710 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z"] Oct 09 10:15:00 crc kubenswrapper[4710]: E1009 10:15:00.142255 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="extract-content" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.142278 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="extract-content" Oct 09 10:15:00 crc kubenswrapper[4710]: E1009 10:15:00.142292 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="registry-server" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.142298 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="registry-server" Oct 09 10:15:00 crc kubenswrapper[4710]: E1009 10:15:00.142326 4710 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="extract-utilities" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.142333 4710 state_mem.go:107] "Deleted CPUSet assignment" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="extract-utilities" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.142586 4710 memory_manager.go:354] "RemoveStaleState removing state" podUID="05aeeec5-723a-4bb0-b42b-bbb189f8eb9f" containerName="registry-server" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.143195 4710 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.145141 4710 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.145331 4710 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.149286 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z"] Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.237118 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.237187 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxf49\" (UniqueName: \"kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.237302 4710 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.339255 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.339314 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.339386 4710 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxf49\" (UniqueName: \"kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.340187 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume\") pod 
\"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.345212 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.358366 4710 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxf49\" (UniqueName: \"kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49\") pod \"collect-profiles-29333415-pgs5z\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.457051 4710 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" Oct 09 10:15:00 crc kubenswrapper[4710]: I1009 10:15:00.839513 4710 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z"] Oct 09 10:15:01 crc kubenswrapper[4710]: I1009 10:15:01.167970 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" event={"ID":"ffe1c796-c969-4397-9206-e4788fe008e3","Type":"ContainerStarted","Data":"34bc6bd3ba2a6cab8bb2f9b2f9698fcfa8a144b2705737b23314da2db8c04a72"} Oct 09 10:15:01 crc kubenswrapper[4710]: I1009 10:15:01.168015 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" event={"ID":"ffe1c796-c969-4397-9206-e4788fe008e3","Type":"ContainerStarted","Data":"f56a787caefa12c71161a1ada32063f6759eef631429ab184f926f40fd819279"} Oct 09 10:15:01 crc kubenswrapper[4710]: I1009 10:15:01.185643 4710 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" podStartSLOduration=1.185632281 podStartE2EDuration="1.185632281s" podCreationTimestamp="2025-10-09 10:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-09 10:15:01.179009063 +0000 UTC m=+4224.669117460" watchObservedRunningTime="2025-10-09 10:15:01.185632281 +0000 UTC m=+4224.675740679" Oct 09 10:15:02 crc kubenswrapper[4710]: I1009 10:15:02.175919 4710 generic.go:334] "Generic (PLEG): container finished" podID="ffe1c796-c969-4397-9206-e4788fe008e3" containerID="34bc6bd3ba2a6cab8bb2f9b2f9698fcfa8a144b2705737b23314da2db8c04a72" exitCode=0 Oct 09 10:15:02 crc kubenswrapper[4710]: I1009 10:15:02.175974 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" event={"ID":"ffe1c796-c969-4397-9206-e4788fe008e3","Type":"ContainerDied","Data":"34bc6bd3ba2a6cab8bb2f9b2f9698fcfa8a144b2705737b23314da2db8c04a72"} Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.761049 4710 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z"
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.794682 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume\") pod \"ffe1c796-c969-4397-9206-e4788fe008e3\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") "
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.794742 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume\") pod \"ffe1c796-c969-4397-9206-e4788fe008e3\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") "
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.794785 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxf49\" (UniqueName: \"kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49\") pod \"ffe1c796-c969-4397-9206-e4788fe008e3\" (UID: \"ffe1c796-c969-4397-9206-e4788fe008e3\") "
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.795754 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume" (OuterVolumeSpecName: "config-volume") pod "ffe1c796-c969-4397-9206-e4788fe008e3" (UID: "ffe1c796-c969-4397-9206-e4788fe008e3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.799765 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49" (OuterVolumeSpecName: "kube-api-access-wxf49") pod "ffe1c796-c969-4397-9206-e4788fe008e3" (UID: "ffe1c796-c969-4397-9206-e4788fe008e3"). InnerVolumeSpecName "kube-api-access-wxf49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.812330 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ffe1c796-c969-4397-9206-e4788fe008e3" (UID: "ffe1c796-c969-4397-9206-e4788fe008e3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.897311 4710 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ffe1c796-c969-4397-9206-e4788fe008e3-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.897334 4710 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ffe1c796-c969-4397-9206-e4788fe008e3-config-volume\") on node \"crc\" DevicePath \"\""
Oct 09 10:15:03 crc kubenswrapper[4710]: I1009 10:15:03.897354 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxf49\" (UniqueName: \"kubernetes.io/projected/ffe1c796-c969-4397-9206-e4788fe008e3-kube-api-access-wxf49\") on node \"crc\" DevicePath \"\""
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.194466 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z" event={"ID":"ffe1c796-c969-4397-9206-e4788fe008e3","Type":"ContainerDied","Data":"f56a787caefa12c71161a1ada32063f6759eef631429ab184f926f40fd819279"}
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.194751 4710 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f56a787caefa12c71161a1ada32063f6759eef631429ab184f926f40fd819279"
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.194572 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29333415-pgs5z"
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.252450 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf"]
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.252756 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29333370-6fvxf"]
Oct 09 10:15:04 crc kubenswrapper[4710]: I1009 10:15:04.823142 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a59650-de3e-43c2-8628-27df66314464" path="/var/lib/kubelet/pods/a2a59650-de3e-43c2-8628-27df66314464/volumes"
Oct 09 10:15:05 crc kubenswrapper[4710]: I1009 10:15:05.546192 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 10:15:05 crc kubenswrapper[4710]: I1009 10:15:05.546551 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 10:15:35 crc kubenswrapper[4710]: I1009 10:15:35.546117 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 10:15:35 crc kubenswrapper[4710]: I1009 10:15:35.546666 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 10:15:40 crc kubenswrapper[4710]: I1009 10:15:40.461462 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" event={"ID":"3c64da38-182d-4f50-bbc4-d97effae13c4","Type":"ContainerDied","Data":"fa46ca85dc15060df8a048fa3a55f9ae3a5b5baf9dc6ba23c3502625111e290b"}
Oct 09 10:15:40 crc kubenswrapper[4710]: I1009 10:15:40.461417 4710 generic.go:334] "Generic (PLEG): container finished" podID="3c64da38-182d-4f50-bbc4-d97effae13c4" containerID="fa46ca85dc15060df8a048fa3a55f9ae3a5b5baf9dc6ba23c3502625111e290b" exitCode=0
Oct 09 10:15:40 crc kubenswrapper[4710]: I1009 10:15:40.462836 4710 scope.go:117] "RemoveContainer" containerID="fa46ca85dc15060df8a048fa3a55f9ae3a5b5baf9dc6ba23c3502625111e290b"
Oct 09 10:15:40 crc kubenswrapper[4710]: I1009 10:15:40.597831 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bvn9v_must-gather-7xvz7_3c64da38-182d-4f50-bbc4-d97effae13c4/gather/0.log"
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.206883 4710 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bvn9v/must-gather-7xvz7"]
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.207849 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-bvn9v/must-gather-7xvz7" podUID="3c64da38-182d-4f50-bbc4-d97effae13c4" containerName="copy" containerID="cri-o://86b4bfca505259000838f9cb023f1426401b0bb7b8e496a4160b4ac146c76208" gracePeriod=2
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.226101 4710 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bvn9v/must-gather-7xvz7"]
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.569923 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bvn9v_must-gather-7xvz7_3c64da38-182d-4f50-bbc4-d97effae13c4/copy/0.log"
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.570887 4710 generic.go:334] "Generic (PLEG): container finished" podID="3c64da38-182d-4f50-bbc4-d97effae13c4" containerID="86b4bfca505259000838f9cb023f1426401b0bb7b8e496a4160b4ac146c76208" exitCode=143
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.918884 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bvn9v_must-gather-7xvz7_3c64da38-182d-4f50-bbc4-d97effae13c4/copy/0.log"
Oct 09 10:15:51 crc kubenswrapper[4710]: I1009 10:15:51.919680 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/must-gather-7xvz7"
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.008780 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g6pw\" (UniqueName: \"kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw\") pod \"3c64da38-182d-4f50-bbc4-d97effae13c4\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") "
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.009027 4710 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output\") pod \"3c64da38-182d-4f50-bbc4-d97effae13c4\" (UID: \"3c64da38-182d-4f50-bbc4-d97effae13c4\") "
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.084649 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw" (OuterVolumeSpecName: "kube-api-access-7g6pw") pod "3c64da38-182d-4f50-bbc4-d97effae13c4" (UID: "3c64da38-182d-4f50-bbc4-d97effae13c4"). InnerVolumeSpecName "kube-api-access-7g6pw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.113908 4710 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g6pw\" (UniqueName: \"kubernetes.io/projected/3c64da38-182d-4f50-bbc4-d97effae13c4-kube-api-access-7g6pw\") on node \"crc\" DevicePath \"\""
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.155585 4710 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "3c64da38-182d-4f50-bbc4-d97effae13c4" (UID: "3c64da38-182d-4f50-bbc4-d97effae13c4"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.216623 4710 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/3c64da38-182d-4f50-bbc4-d97effae13c4-must-gather-output\") on node \"crc\" DevicePath \"\""
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.578459 4710 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bvn9v_must-gather-7xvz7_3c64da38-182d-4f50-bbc4-d97effae13c4/copy/0.log"
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.578776 4710 scope.go:117] "RemoveContainer" containerID="86b4bfca505259000838f9cb023f1426401b0bb7b8e496a4160b4ac146c76208"
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.578880 4710 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bvn9v/must-gather-7xvz7"
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.614912 4710 scope.go:117] "RemoveContainer" containerID="fa46ca85dc15060df8a048fa3a55f9ae3a5b5baf9dc6ba23c3502625111e290b"
Oct 09 10:15:52 crc kubenswrapper[4710]: I1009 10:15:52.822666 4710 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c64da38-182d-4f50-bbc4-d97effae13c4" path="/var/lib/kubelet/pods/3c64da38-182d-4f50-bbc4-d97effae13c4/volumes"
Oct 09 10:15:53 crc kubenswrapper[4710]: I1009 10:15:53.271698 4710 scope.go:117] "RemoveContainer" containerID="2fa1f65ea5d29d0252dad6caf7a1e9a1f9f483a13afd71364994b3ca1c6bac5e"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.545475 4710 patch_prober.go:28] interesting pod/machine-config-daemon-fzkfm container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.546154 4710 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.546202 4710 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.547052 4710 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"} pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.547110 4710 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerName="machine-config-daemon" containerID="cri-o://6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59" gracePeriod=600
Oct 09 10:16:05 crc kubenswrapper[4710]: E1009 10:16:05.666160 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.696982 4710 generic.go:334] "Generic (PLEG): container finished" podID="f676b5cb-d273-4cac-85de-23ca7b6151b6" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59" exitCode=0
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.697047 4710 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" event={"ID":"f676b5cb-d273-4cac-85de-23ca7b6151b6","Type":"ContainerDied","Data":"6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"}
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.697085 4710 scope.go:117] "RemoveContainer" containerID="4f3bd091b62a5d6a8bcde1dece4c98d7c1dbcf0291047b04b5db8b6a2857e401"
Oct 09 10:16:05 crc kubenswrapper[4710]: I1009 10:16:05.701281 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:16:05 crc kubenswrapper[4710]: E1009 10:16:05.701775 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:16:18 crc kubenswrapper[4710]: I1009 10:16:18.815252 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:16:18 crc kubenswrapper[4710]: E1009 10:16:18.816044 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:16:31 crc kubenswrapper[4710]: I1009 10:16:31.815796 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:16:31 crc kubenswrapper[4710]: E1009 10:16:31.817301 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:16:45 crc kubenswrapper[4710]: I1009 10:16:45.815222 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:16:45 crc kubenswrapper[4710]: E1009 10:16:45.816015 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:16:53 crc kubenswrapper[4710]: I1009 10:16:53.335089 4710 scope.go:117] "RemoveContainer" containerID="20b08aa19bf9f6143cebd648de97adcf3f0365408503554a7d482014b0852072"
Oct 09 10:16:58 crc kubenswrapper[4710]: I1009 10:16:58.815347 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:16:58 crc kubenswrapper[4710]: E1009 10:16:58.816513 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:17:09 crc kubenswrapper[4710]: I1009 10:17:09.815262 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:17:09 crc kubenswrapper[4710]: E1009 10:17:09.816002 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
Oct 09 10:17:22 crc kubenswrapper[4710]: I1009 10:17:22.815525 4710 scope.go:117] "RemoveContainer" containerID="6f37f9bd48e2f76920234e0d8c3450eecdb8757df8b6ad92d401c06c81106d59"
Oct 09 10:17:22 crc kubenswrapper[4710]: E1009 10:17:22.816067 4710 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-fzkfm_openshift-machine-config-operator(f676b5cb-d273-4cac-85de-23ca7b6151b6)\"" pod="openshift-machine-config-operator/machine-config-daemon-fzkfm" podUID="f676b5cb-d273-4cac-85de-23ca7b6151b6"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515071705671024456 0ustar corerootvar/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015071705672017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015071674737016526 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015071674740015470 5ustar corecore